Compare commits
345 Commits
| SHA1 |
|---|
| 01ee524049 |
| af9cb65338 |
| 8aa11c580b |
| ada627f444 |
| a7b6d338c3 |
| 9f00538b97 |
| a085015282 |
| 0b9c220fbb |
| 0e3d04873d |
| b7578d939f |
| b5c28de03f |
| e17d25c156 |
| c25dc1b99c |
| a493a574bd |
| 4284493dce |
| 25059de8e1 |
| 1731b05ad0 |
| e80dc663ac |
| 39988a4c2f |
| 415bff303a |
| a65eb62a54 |
| 03b2982128 |
| bff0527857 |
| f3b7634254 |
| 6a9593c0b9 |
| edb785b8e5 |
| 26d757b50a |
| 535079ee87 |
| ac380c29c1 |
| 3fd212f26c |
| 04a3abc651 |
| 6caf85ddd1 |
| 16e4071508 |
| 69e7c4324b |
| a1c4a8cbe5 |
| e37f6cfda7 |
| 989c804409 |
| 7345bc3c82 |
| 69bee35700 |
| 598e24df7c |
| 0ae669201e |
| f52a8a4642 |
| 9c40b61ef2 |
| 72dabcda83 |
| 161a06dbcc |
| 8ed3d4e70c |
| a4223ccc8a |
| ca85923855 |
| 52bfe7c493 |
| 4786bd0cbe |
| cadab160ff |
| 6a7f17b2b0 |
| 4986a4d775 |
| 903af0c2cf |
| 3282fa803c |
| 67cc47608d |
| 0411704b8b |
| 1de85b2c69 |
| 33b012f29d |
| 1357584df3 |
| e15809e271 |
| 0da1950427 |
| e590b921be |
| 09462692f5 |
| c1d1b5f762 |
| 6b9c87b858 |
| 485b6eb904 |
| 057630bdb5 |
| 6b02873b30 |
| 0fa0fc6d6b |
| 339ec07465 |
| cd2e798fea |
| d5cadbeae2 |
| 8046a3ccae |
| bf91d60b31 |
| 539c047ec8 |
| 290c18fa87 |
| 98c46f5e57 |
| f8bd5b5b4e |
| 816d32edad |
| 8453835c05 |
| 9328c356c8 |
| 89e3c1fc94 |
| 67e54cd15d |
| 278ea24786 |
| aba1662631 |
| 61eeb60c19 |
| 5e9a8f4806 |
| 4cb274e9bc |
| 8b9b1a6a35 |
| 2655964113 |
| 188bad061b |
| 3af4c329aa |
| 6c13395f7d |
| 77b32ba360 |
| 91dba291ac |
| a6bc293640 |
| 53882d6e5f |
| d68adfbf10 |
| 498a392d7f |
| 740f6c05db |
| d810ce301f |
| 5ef6a14d24 |
| a13f6f1e68 |
| d2d0f1aaee |
| e64c72cc89 |
| 9ab915a08b |
| e26fbf0328 |
| d9a52c4a2a |
| 7b2ec90de9 |
| d310bf8bbf |
| 2abc6cc939 |
| 56d4e694a2 |
| 5f002c9cdc |
| 759daf4b4a |
| 3a8d9568e3 |
| ff22a9d94a |
| a6e42d5374 |
| a2f74e0488 |
| ee44240569 |
| d0828744a2 |
| 6e2e576b29 |
| bf61e27f8a |
| c441c30b46 |
| 0e741230ea |
| 1bfe9ac2db |
| 6812e72348 |
| b6449d2f5b |
| 7e3ea20dce |
| c9d6fe9dcd |
| 4a649a6b8b |
| 8fef184963 |
| 69583ca3c0 |
| 6038a68e91 |
| fa8bd8db87 |
| 18b4f0ed0f |
| 461f9d66c9 |
| 2155103c7a |
| c9a6839c45 |
| 9fbe331a80 |
| a56389c4ce |
| 64656784cb |
| 6eff2c181e |
| 1aa48c6d62 |
| c7ca1a346d |
| fa0ec7b502 |
| 768438c136 |
| 9badea0b3c |
| 43263a1650 |
| 821e02dc75 |
| ed011ecf28 |
| d861de4c2f |
| 3a3b2449dc |
| d2614406ca |
| 0798d098ae |
| dab7ddc2bb |
| 081a96e281 |
| a7dd881d79 |
| 8134d5e24d |
| ba6756cd45 |
| 5d8fce21ac |
| e7e4a5bcd4 |
| 55f33357ea |
| 90568bba31 |
| 5d6e2dc2e4 |
| 6bb33f2559 |
| ced92554ed |
| dff3383158 |
| bf03c89cb2 |
| 9f1484bbef |
| 3899680e26 |
| 6bb2eb25a1 |
| f8dfd8edb3 |
| 042be624a3 |
| 6bafa4c79a |
| 58b42fac5c |
| 3b47b9558a |
| ccf9636296 |
| 96942719f2 |
| 69cf1c1adc |
| d77cba40b8 |
| 968735b555 |
| ceed9d29eb |
| 41329039ee |
| f68b102ca8 |
| fa36e54298 |
| b689f57435 |
| 885fa0ff56 |
| 303acb72a3 |
| b2a46cd0cd |
| 5a5ecb3ee3 |
| 60b4ab6a63 |
| e4b096a08f |
| 343f55049b |
| 6b46025261 |
| 5ea503f23e |
| ce95f9ac23 |
| c3fb87501b |
| dc6a343612 |
| 3a61053957 |
| 570129e4d4 |
| 3315c7045f |
| 5ae50e242c |
| bbcf449719 |
| aab10f7184 |
| 8d43488cb8 |
| 0a9c647e19 |
| 40db5d4aa8 |
| 9254532baa |
| 7abed47cf0 |
| 5c6ac758f7 |
| 007677962c |
| 9c4aeab64a |
| 48e6fc0efe |
| c8be713d11 |
| ae887c8648 |
| 5daac2531b |
| 68def00327 |
| 67e7976710 |
| 35747e937e |
| fb439787a4 |
| 8fa368f473 |
| c84a9d07b1 |
| 7fb46cdfc4 |
| 52985e5ddc |
| e880935dc3 |
| cc22b1bca5 |
| 49a5128918 |
| fedc7dcb44 |
| cd32b20215 |
| 15cd9832c4 |
| f25d4e4553 |
| 12d1c82b63 |
| aebe855078 |
| 3416a71ebd |
| 94b3fea528 |
| ad1a9ecca1 |
| 715accfb8a |
| a8e03c6138 |
| f69446b648 |
| eedfbe5846 |
| 153351cc9f |
| 1b1eec40a7 |
| 763877541a |
| 1fad7d72a2 |
| 51ea2ea879 |
| d77a478bf0 |
| e413c0264a |
| f88e7f898c |
| d07bd4a6db |
| fb34c099d5 |
| 1d2ee56a15 |
| 86665f7f09 |
| 0d2b4af986 |
| dc2b2eeb9f |
| e5dbb66d53 |
| 3474b1c471 |
| 3886de5b7c |
| 2b3cec06b3 |
| 8536754d14 |
| 1f36235801 |
| a4194b14f9 |
| 2dcc629d9d |
| 98ddadc6bc |
| f6e47b7383 |
| f073ddc906 |
| 3e00631925 |
| 9b7ac58562 |
| f242ddd801 |
| c129886fe2 |
| f577e814cf |
| c860a0cedd |
| ae7e28e492 |
| 90a63234ad |
| 14bca52e8f |
| 2f3c3361cf |
| 4034134055 |
| c04f94cb7b |
| fd1bbc7925 |
| ff69bed394 |
| d6e8c5146f |
| 9a04cf99d7 |
| 86e7c11e71 |
| 361cc08faa |
| 70dc771052 |
| c14873a799 |
| bba5abd74b |
| a224e79c1f |
| c305d98186 |
| 7c5a473e71 |
| 5e0f5d1eed |
| 238b269bc4 |
| 0ad121b9d2 |
| 7088acd9fd |
| e0a900d4b6 |
| a0fe2f0c7d |
| d5b9bc2f26 |
| 584254e6ca |
| a2963ed7bb |
| 2a3c2e133d |
| 3e7dcb2755 |
| faeec00b39 |
| eeed81392f |
| 95dce9e992 |
| 502bd2a191 |
| 17ac92a9d0 |
| ba028cde0c |
| 6e751e7a9b |
| 948b56d0e6 |
| 4bf2dc9ece |
| 125823f8ab |
| 24d33397e9 |
| 2c553825f4 |
| 198c485e9a |
| 0138505507 |
| 5d50dcc600 |
| 7bdd8c4626 |
| fc82c35f0c |
| 426ebad300 |
| 1afe61c593 |
| c20751829b |
| a3b8ee8392 |
| 156c0fe7f6 |
| 216f7a38cf |
| fd04dc10d4 |
| d39bdce926 |
| c6e01245b0 |
| c168ee7ba4 |
| 7575253000 |
| c28c1efbb1 |
| e6aa2c3b78 |
| ab7c481f83 |
| 84ad1c352d |
| e9aad39ac9 |
| c3444a87bc |
| 67b224b340 |
| bded14d36b |
| 73fa0b6631 |
| 2f07337588 |
| da163d44e7 |
| 56fbf8ae0c |
| 327eb4b39b |
| ae7873a7e3 |
| 9a5f01813b |
| 0605a3b725 |
.devcontainer/.env.example (new file, +28)

```ini
COMPOSE_PROJECT_NAME=trmm

IMAGE_REPO=tacticalrmm/
VERSION=latest

# tactical credentials (Used to login to dashboard)
TRMM_USER=tactical
TRMM_PASS=tactical

# dns settings
APP_HOST=rmm.example.com
API_HOST=api.example.com
MESH_HOST=mesh.example.com

# mesh settings
MESH_USER=tactical
MESH_PASS=tactical
MONGODB_USER=mongouser
MONGODB_PASSWORD=mongopass

# database settings
POSTGRES_USER=postgres
POSTGRES_PASS=postgrespass

# DEV SETTINGS
APP_PORT=8000
API_PORT=8080
HTTP_PROTOCOL=https
```
.devcontainer/api.dockerfile (new file, +28)

```dockerfile
FROM python:3.8-slim

ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_GO_DIR /usr/local/rmmgo
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
ENV WORKSPACE_DIR /workspace
ENV TACTICAL_USER tactical
ENV VIRTUAL_ENV ${WORKSPACE_DIR}/api/tacticalrmm/env
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

EXPOSE 8000

RUN groupadd -g 1000 tactical && \
    useradd -u 1000 -g 1000 tactical

# Copy Go Files
COPY --from=golang:1.15 /usr/local/go ${TACTICAL_GO_DIR}/go

# Copy Dev python reqs
COPY ./requirements.txt /

# Copy Docker Entrypoint
COPY ./entrypoint.sh /
RUN chmod +x /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm
```
.devcontainer/docker-compose.debug.yml (new file, +19)

```yaml
version: '3.4'

services:
  api-dev:
    image: api-dev
    build:
      context: .
      dockerfile: ./api.dockerfile
    command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000 --nothreading --noreload"]
    ports:
      - 8000:8000
      - 5678:5678
    volumes:
      - tactical-data-dev:/opt/tactical
      - ..:/workspace:cached
    networks:
      dev:
        aliases:
          - tactical-backend
```
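The `command` above installs debugpy into the container and holds the Django dev server until a debugger attaches on port 5678 (the "Django: Docker Remote Attach" launch config further down in this compare does the attaching). A minimal sketch of what that CLI invocation amounts to in debugpy's in-process API, as an illustration only; this snippet is not repo code:

```python
# Hedged sketch: the compose command (python /tmp/debugpy --wait-for-client
# --listen 0.0.0.0:5678 manage.py runserver ...) wraps this same API.
import debugpy

debugpy.listen(("0.0.0.0", 5678))  # same port the compose file publishes
debugpy.wait_for_client()          # block until the IDE attaches
# ...then hand control to the Django dev server as usual
```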
.devcontainer/docker-compose.yml (new file, +209)

```yaml
version: '3.4'

services:
  api-dev:
    image: api-dev
    build:
      context: .
      dockerfile: ./api.dockerfile
    command: ["tactical-api"]
    environment:
      API_PORT: ${API_PORT}
    ports:
      - "8000:${API_PORT}"
    volumes:
      - tactical-data-dev:/opt/tactical
      - ..:/workspace:cached
    networks:
      dev:
        aliases:
          - tactical-backend

  app-dev:
    image: node:12-alpine
    command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
    working_dir: /workspace/web
    volumes:
      - ..:/workspace:cached
    ports:
      - "8080:${APP_PORT}"
    networks:
      dev:
        aliases:
          - tactical-frontend

  # nats
  nats-dev:
    image: ${IMAGE_REPO}tactical-nats:${VERSION}
    restart: always
    environment:
      API_HOST: ${API_HOST}
      API_PORT: ${API_PORT}
      DEV: 1
    ports:
      - "4222:4222"
    volumes:
      - tactical-data-dev:/opt/tactical
      - ..:/workspace:cached
    networks:
      dev:
        aliases:
          - ${API_HOST}
          - tactical-nats

  # meshcentral container
  meshcentral-dev:
    image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
    restart: always
    environment:
      MESH_HOST: ${MESH_HOST}
      MESH_USER: ${MESH_USER}
      MESH_PASS: ${MESH_PASS}
      MONGODB_USER: ${MONGODB_USER}
      MONGODB_PASSWORD: ${MONGODB_PASSWORD}
      NGINX_HOST_IP: 172.21.0.20
    networks:
      dev:
        aliases:
          - tactical-meshcentral
          - ${MESH_HOST}
    volumes:
      - tactical-data-dev:/opt/tactical
      - mesh-data-dev:/home/node/app/meshcentral-data
    depends_on:
      - mongodb-dev

  # mongodb container for meshcentral
  mongodb-dev:
    image: mongo:4.4
    restart: always
    environment:
      MONGO_INITDB_ROOT_USERNAME: ${MONGODB_USER}
      MONGO_INITDB_ROOT_PASSWORD: ${MONGODB_PASSWORD}
      MONGO_INITDB_DATABASE: meshcentral
    networks:
      dev:
        aliases:
          - tactical-mongodb
    volumes:
      - mongo-dev-data:/data/db

  # postgres database for api service
  postgres-dev:
    image: postgres:13-alpine
    restart: always
    environment:
      POSTGRES_DB: tacticalrmm
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASSWORD: ${POSTGRES_PASS}
    volumes:
      - postgres-data-dev:/var/lib/postgresql/data
    networks:
      dev:
        aliases:
          - tactical-postgres

  # redis container for celery tasks
  redis-dev:
    restart: always
    image: redis:6.0-alpine
    networks:
      dev:
        aliases:
          - tactical-redis

  init-dev:
    image: api-dev
    build:
      context: .
      dockerfile: ./api.dockerfile
    restart: on-failure
    command: ["tactical-init-dev"]
    environment:
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASS: ${POSTGRES_PASS}
      APP_HOST: ${APP_HOST}
      API_HOST: ${API_HOST}
      MESH_HOST: ${MESH_HOST}
      MESH_USER: ${MESH_USER}
      TRMM_USER: ${TRMM_USER}
      TRMM_PASS: ${TRMM_PASS}
      HTTP_PROTOCOL: ${HTTP_PROTOCOL}
      APP_PORT: ${APP_PORT}
    depends_on:
      - postgres-dev
      - meshcentral-dev
    networks:
      - dev
    volumes:
      - tactical-data-dev:/opt/tactical
      - ..:/workspace:cached

  # container for celery worker service
  celery-dev:
    image: api-dev
    build:
      context: .
      dockerfile: ./api.dockerfile
    command: ["tactical-celery-dev"]
    restart: always
    networks:
      - dev
    volumes:
      - tactical-data-dev:/opt/tactical
      - ..:/workspace:cached
    depends_on:
      - postgres-dev
      - redis-dev

  # container for celery beat service
  celerybeat-dev:
    image: api-dev
    build:
      context: .
      dockerfile: ./api.dockerfile
    command: ["tactical-celerybeat-dev"]
    restart: always
    networks:
      - dev
    volumes:
      - tactical-data-dev:/opt/tactical
      - ..:/workspace:cached
    depends_on:
      - postgres-dev
      - redis-dev

  nginx-dev:
    # container for tactical reverse proxy
    image: ${IMAGE_REPO}tactical-nginx:${VERSION}
    restart: always
    environment:
      APP_HOST: ${APP_HOST}
      API_HOST: ${API_HOST}
      MESH_HOST: ${MESH_HOST}
      CERT_PUB_KEY: ${CERT_PUB_KEY}
      CERT_PRIV_KEY: ${CERT_PRIV_KEY}
      APP_PORT: ${APP_PORT}
      API_PORT: ${API_PORT}
    networks:
      dev:
        ipv4_address: 172.21.0.20
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - tactical-data-dev:/opt/tactical

volumes:
  tactical-data-dev:
  postgres-data-dev:
  mongo-dev-data:
  mesh-data-dev:

networks:
  dev:
    driver: bridge
    ipam:
      driver: default
      config:
        - subnet: 172.21.0.0/24
```
.devcontainer/entrypoint.sh (new file, +169)

```bash
#!/usr/bin/env bash

set -e

: "${TRMM_USER:=tactical}"
: "${TRMM_PASS:=tactical}"
: "${POSTGRES_HOST:=tactical-postgres}"
: "${POSTGRES_PORT:=5432}"
: "${POSTGRES_USER:=tactical}"
: "${POSTGRES_PASS:=tactical}"
: "${POSTGRES_DB:=tacticalrmm}"
: "${MESH_CONTAINER:=tactical-meshcentral}"
: "${MESH_USER:=meshcentral}"
: "${MESH_PASS:=meshcentralpass}"
: "${MESH_HOST:=tactical-meshcentral}"
: "${API_HOST:=tactical-backend}"
: "${APP_HOST:=tactical-frontend}"
: "${REDIS_HOST:=tactical-redis}"
: "${HTTP_PROTOCOL:=http}"
: "${APP_PORT:=8080}"
: "${API_PORT:=8000}"

# Add python venv to path
export PATH="${VIRTUAL_ENV}/bin:$PATH"

function check_tactical_ready {
  sleep 15
  until [ -f "${TACTICAL_READY_FILE}" ]; do
    echo "waiting for init container to finish install or update..."
    sleep 10
  done
}

function django_setup {
  until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
    echo "waiting for postgresql container to be ready..."
    sleep 5
  done

  until (echo > /dev/tcp/"${MESH_CONTAINER}"/443) &> /dev/null; do
    echo "waiting for meshcentral container to be ready..."
    sleep 5
  done

  echo "setting up django environment"

  # configure django settings
  MESH_TOKEN=$(cat ${TACTICAL_DIR}/tmp/mesh_token)

  DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)

  localvars="$(cat << EOF
SECRET_KEY = '${DJANGO_SEKRET}'

DEBUG = True

DOCKER_BUILD = True

CERT_FILE = '/opt/tactical/certs/fullchain.pem'
KEY_FILE = '/opt/tactical/certs/privkey.pem'

SCRIPTS_DIR = '${WORKSPACE_DIR}/scripts'

ALLOWED_HOSTS = ['${API_HOST}', '*']

ADMIN_URL = 'admin/'

CORS_ORIGIN_ALLOW_ALL = True

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': '${POSTGRES_DB}',
        'USER': '${POSTGRES_USER}',
        'PASSWORD': '${POSTGRES_PASS}',
        'HOST': '${POSTGRES_HOST}',
        'PORT': '${POSTGRES_PORT}',
    }
}

REST_FRAMEWORK = {
    'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',

    'DEFAULT_PERMISSION_CLASSES': (
        'rest_framework.permissions.IsAuthenticated',
    ),
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'knox.auth.TokenAuthentication',
    ),
}

if not DEBUG:
    REST_FRAMEWORK.update({
        'DEFAULT_RENDERER_CLASSES': (
            'rest_framework.renderers.JSONRenderer',
        )
    })

MESH_USERNAME = '${MESH_USER}'
MESH_SITE = 'https://${MESH_HOST}'
MESH_TOKEN_KEY = '${MESH_TOKEN}'
REDIS_HOST = '${REDIS_HOST}'
EOF
)"

  echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py

  # run migrations and init scripts
  python manage.py migrate --no-input
  python manage.py collectstatic --no-input
  python manage.py initial_db_setup
  python manage.py initial_mesh_setup
  python manage.py load_chocos
  python manage.py load_community_scripts
  python manage.py reload_nats

  # create super user
  echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
}

if [ "$1" = 'tactical-init-dev' ]; then

  # make directories if they don't exist
  mkdir -p ${TACTICAL_DIR}/tmp

  test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"

  # setup Python virtual env and install dependencies
  test -f ${VIRTUAL_ENV} && python -m venv --copies ${VIRTUAL_ENV}
  pip install --no-cache-dir -r /requirements.txt

  django_setup

  # create .env file for frontend
  webenv="$(cat << EOF
PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}"
DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}"
APP_URL = https://${APP_HOST}
EOF
)"
  echo "${webenv}" | tee ${WORKSPACE_DIR}/web/.env > /dev/null

  # chown everything to tactical user
  chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
  chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${TACTICAL_DIR}"

  # create install ready file
  su -c "echo 'tactical-init' > ${TACTICAL_READY_FILE}" "${TACTICAL_USER}"
fi

if [ "$1" = 'tactical-api' ]; then
  cp ${WORKSPACE_DIR}/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
  chmod +x /usr/local/bin/goversioninfo

  check_tactical_ready
  python manage.py runserver 0.0.0.0:${API_PORT}
fi

if [ "$1" = 'tactical-celery-dev' ]; then
  check_tactical_ready
  env/bin/celery -A tacticalrmm worker -l debug
fi

if [ "$1" = 'tactical-celerybeat-dev' ]; then
  check_tactical_ready
  test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
  env/bin/celery -A tacticalrmm beat -l debug
fi
```
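`django_setup` above polls Postgres and MeshCentral with bash's `/dev/tcp` redirection before running migrations. For readers unfamiliar with that idiom, here is a rough Python equivalent of the same readiness loop; `wait_for_port` is an illustrative name, not part of the repo:

```python
# Hedged sketch of the entrypoint's readiness check, assuming the compose
# network aliases shown above (tactical-postgres, tactical-meshcentral).
import socket
import time

def wait_for_port(host: str, port: int, delay: int = 5) -> None:
    """Block until a TCP connect to (host, port) succeeds."""
    while True:
        try:
            with socket.create_connection((host, port), timeout=2):
                return  # the service is accepting connections
        except OSError:
            print(f"waiting for {host}:{port} to be ready...")
            time.sleep(delay)

# equivalent of: until (echo > /dev/tcp/$POSTGRES_HOST/$POSTGRES_PORT) ...
wait_for_port("tactical-postgres", 5432)
wait_for_port("tactical-meshcentral", 443)
```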
.devcontainer/requirements.txt (new file, +44)

```text
# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
amqp==2.6.1
asgiref==3.3.1
asyncio-nats-client==0.11.4
billiard==3.6.3.0
celery==4.4.6
certifi==2020.12.5
cffi==1.14.3
chardet==3.0.4
cryptography==3.2.1
decorator==4.4.2
Django==3.1.4
django-cors-headers==3.5.0
django-rest-knox==4.1.0
djangorestframework==3.12.2
future==0.18.2
idna==2.10
kombu==4.6.11
loguru==0.5.3
msgpack==1.0.0
packaging==20.4
psycopg2-binary==2.8.6
pycparser==2.20
pycryptodome==3.9.9
pyotp==2.4.1
pyparsing==2.4.7
pytz==2020.4
qrcode==6.1
redis==3.5.3
requests==2.25.0
six==1.15.0
sqlparse==0.4.1
twilio==6.49.0
urllib3==1.26.2
validators==0.18.1
vine==1.3.0
websockets==8.1
zipp==3.4.0
black
Werkzeug
django-extensions
coverage
coveralls
model_bakery
```
.dockerignore (new file, +25)

```text
**/__pycache__
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/azds.yaml
**/charts
**/docker-compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
**/env
README.md
```
.github/FUNDING.yml (vendored, new file, +12)

```yaml
# These are supported funding model platforms

github: wh1te909
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
```
.github/workflows/docker-build-push.yml (vendored, new file, +78)

```yaml
name: Publish Tactical Docker Images
on:
  push:
    tags:
      - "v*.*.*"
jobs:
  docker:
    name: Build and Push Docker Images
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
        uses: actions/checkout@v2

      - name: Get Github Tag
        id: prep
        run: |
          echo ::set-output name=version::${GITHUB_REF#refs/tags/v}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and Push Tactical Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical:${{ steps.prep.outputs.version }},tacticalrmm/tactical:latest

      - name: Build and Push Tactical MeshCentral Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-meshcentral/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-meshcentral:${{ steps.prep.outputs.version }},tacticalrmm/tactical-meshcentral:latest

      - name: Build and Push Tactical NATS Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-nats/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest

      - name: Build and Push Tactical Frontend Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-frontend/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-frontend:${{ steps.prep.outputs.version }},tacticalrmm/tactical-frontend:latest

      - name: Build and Push Tactical Nginx Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-nginx/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-nginx:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nginx:latest
```
.vscode/launch.json (vendored)

```diff
@@ -14,6 +14,20 @@
                 "0.0.0.0:8000"
             ],
             "django": true
         },
+        {
+            "name": "Django: Docker Remote Attach",
+            "type": "python",
+            "request": "attach",
+            "port": 5678,
+            "host": "localhost",
+            "preLaunchTask": "docker debug",
+            "pathMappings": [
+                {
+                    "localRoot": "${workspaceFolder}/api/tacticalrmm",
+                    "remoteRoot": "/workspace/api/tacticalrmm"
+                }
+            ]
+        }
     ]
 }
```
.vscode/settings.json (vendored)

```diff
@@ -2,7 +2,7 @@
     "python.pythonPath": "api/tacticalrmm/env/bin/python",
     "python.languageServer": "Pylance",
     "python.analysis.extraPaths": [
-        "api/tacticalrmm"
+        "api/tacticalrmm",
     ],
     "python.analysis.typeCheckingMode": "basic",
     "python.formatting.provider": "black",
@@ -41,4 +41,23 @@
         "**/*.zip": true
     },
     },
+    "go.useLanguageServer": true,
+    "[go]": {
+        "editor.formatOnSave": true,
+        "editor.codeActionsOnSave": {
+            "source.organizeImports": false,
+        },
+        "editor.snippetSuggestions": "none",
+    },
+    "[go.mod]": {
+        "editor.formatOnSave": true,
+        "editor.codeActionsOnSave": {
+            "source.organizeImports": true,
+        },
+    },
+    "gopls": {
+        "usePlaceholders": true,
+        "completeUnimported": true,
+        "staticcheck": true,
+    }
 }
```
.vscode/tasks.json (vendored, new file, +23)

```jsonc
{
    // See https://go.microsoft.com/fwlink/?LinkId=733558
    // for the documentation about the tasks.json format
    "version": "2.0.0",
    "tasks": [
        {
            "label": "docker debug",
            "type": "shell",
            "command": "docker-compose",
            "args": [
                "-p",
                "trmm",
                "-f",
                ".devcontainer/docker-compose.yml",
                "-f",
                ".devcontainer/docker-compose.debug.yml",
                "up",
                "-d",
                "--build"
            ]
        }
    ]
}
```
````diff
@@ -6,7 +6,7 @@
 [](https://github.com/python/black)

 Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
-It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang, as well as the [SaltStack](https://github.com/saltstack/salt) api and [MeshCentral](https://github.com/Ylianst/MeshCentral)
+It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)

 # [LIVE DEMO](https://rmm.xlawgaming.com/)
 Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
@@ -36,7 +36,7 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
 ## Installation

 ### Requirements
-- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04)
+- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04 / Debian 10)
 - A domain you own with at least 3 subdomains
 - Google Authenticator app (2 factor is NOT optional)

@@ -62,7 +62,6 @@ sudo ufw default allow outgoing
 sudo ufw allow ssh
 sudo ufw allow http
 sudo ufw allow https
-sudo ufw allow proto tcp from any to any port 4505,4506
 sudo ufw allow proto tcp from any to any port 4222
 sudo ufw enable && sudo ufw reload
 ```
````
@@ -1,457 +0,0 @@ (entire file removed)

```python
from __future__ import absolute_import
import psutil
import os
import datetime
import zlib
import json
import base64
import wmi
import win32evtlog
import win32con
import win32evtlogutil
import winerror
from time import sleep
import requests
import subprocess
import random
import platform

ARCH = "64" if platform.machine().endswith("64") else "32"
PROGRAM_DIR = os.path.join(os.environ["ProgramFiles"], "TacticalAgent")
TAC_RMM = os.path.join(PROGRAM_DIR, "tacticalrmm.exe")
NSSM = os.path.join(PROGRAM_DIR, "nssm.exe" if ARCH == "64" else "nssm-x86.exe")
TEMP_DIR = os.path.join(os.environ["WINDIR"], "Temp")
SYS_DRIVE = os.environ["SystemDrive"]
PY_BIN = os.path.join(SYS_DRIVE, "\\salt", "bin", "python.exe")
SALT_CALL = os.path.join(SYS_DRIVE, "\\salt", "salt-call.bat")


def get_services():
    # see https://github.com/wh1te909/tacticalrmm/issues/38
    # for why I am manually implementing the svc.as_dict() method of psutil
    ret = []
    for svc in psutil.win_service_iter():
        i = {}
        try:
            i["display_name"] = svc.display_name()
            i["binpath"] = svc.binpath()
            i["username"] = svc.username()
            i["start_type"] = svc.start_type()
            i["status"] = svc.status()
            i["pid"] = svc.pid()
            i["name"] = svc.name()
            i["description"] = svc.description()
        except Exception:
            continue
        else:
            ret.append(i)

    return ret


def run_python_script(filename, timeout, script_type="userdefined"):
    # no longer used in agent version 0.11.0
    file_path = os.path.join(TEMP_DIR, filename)

    if os.path.exists(file_path):
        try:
            os.remove(file_path)
        except:
            pass

    if script_type == "userdefined":
        __salt__["cp.get_file"](f"salt://scripts/userdefined/{filename}", file_path)
    else:
        __salt__["cp.get_file"](f"salt://scripts/{filename}", file_path)

    return __salt__["cmd.run_all"](f"{PY_BIN} {file_path}", timeout=timeout)


def run_script(filepath, filename, shell, timeout, args=[], bg=False):
    if shell == "powershell" or shell == "cmd":
        if args:
            return __salt__["cmd.script"](
                source=filepath,
                args=" ".join(map(lambda x: f'"{x}"', args)),
                shell=shell,
                timeout=timeout,
                bg=bg,
            )
        else:
            return __salt__["cmd.script"](
                source=filepath, shell=shell, timeout=timeout, bg=bg
            )

    elif shell == "python":
        file_path = os.path.join(TEMP_DIR, filename)

        if os.path.exists(file_path):
            try:
                os.remove(file_path)
            except:
                pass

        __salt__["cp.get_file"](filepath, file_path)

        salt_cmd = "cmd.run_bg" if bg else "cmd.run_all"

        if args:
            a = " ".join(map(lambda x: f'"{x}"', args))
            cmd = f"{PY_BIN} {file_path} {a}"
            return __salt__[salt_cmd](cmd, timeout=timeout)
        else:
            return __salt__[salt_cmd](f"{PY_BIN} {file_path}", timeout=timeout)


def uninstall_agent():
    remove_exe = os.path.join(PROGRAM_DIR, "unins000.exe")
    __salt__["cmd.run_bg"]([remove_exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"])
    return "ok"


def update_salt():
    for p in psutil.process_iter():
        with p.oneshot():
            if p.name() == "tacticalrmm.exe" and "updatesalt" in p.cmdline():
                return "running"

    from subprocess import Popen, PIPE

    CREATE_NEW_PROCESS_GROUP = 0x00000200
    DETACHED_PROCESS = 0x00000008
    cmd = [TAC_RMM, "-m", "updatesalt"]
    p = Popen(
        cmd,
        stdin=PIPE,
        stdout=PIPE,
        stderr=PIPE,
        close_fds=True,
        creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
    )
    return p.pid


def run_manual_checks():
    __salt__["cmd.run_bg"]([TAC_RMM, "-m", "runchecks"])
    return "ok"


def install_updates():
    for p in psutil.process_iter():
        with p.oneshot():
            if p.name() == "tacticalrmm.exe" and "winupdater" in p.cmdline():
                return "running"

    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "winupdater"])


def _wait_for_service(svc, status, retries=10):
    attempts = 0
    while 1:
        try:
            service = psutil.win_service_get(svc)
        except psutil.NoSuchProcess:
            stat = "fail"
            attempts += 1
            sleep(5)
        else:
            stat = service.status()
            if stat != status:
                attempts += 1
                sleep(5)
            else:
                attempts = 0

        if attempts == 0 or attempts > retries:
            break

    return stat


def agent_update_v2(inno, url):
    # make sure another instance of the update is not running
    # this function spawns 2 instances of itself (because we call it twice with salt run_bg)
    # so if more than 2 running, don't continue as an update is already running
    count = 0
    for p in psutil.process_iter():
        try:
            with p.oneshot():
                if "win_agent.agent_update_v2" in p.cmdline():
                    count += 1
        except Exception:
            continue

    if count > 2:
        return "already running"

    sleep(random.randint(1, 20))  # don't flood the rmm

    exe = os.path.join(TEMP_DIR, inno)

    if os.path.exists(exe):
        try:
            os.remove(exe)
        except:
            pass

    try:
        r = requests.get(url, stream=True, timeout=600)
    except Exception:
        return "failed"

    if r.status_code != 200:
        return "failed"

    with open(exe, "wb") as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    del r

    ret = subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=120)

    tac = _wait_for_service(svc="tacticalagent", status="running")
    if tac != "running":
        subprocess.run([NSSM, "start", "tacticalagent"], timeout=30)

    chk = _wait_for_service(svc="checkrunner", status="running")
    if chk != "running":
        subprocess.run([NSSM, "start", "checkrunner"], timeout=30)

    return "ok"


def do_agent_update_v2(inno, url):
    return __salt__["cmd.run_bg"](
        [
            SALT_CALL,
            "win_agent.agent_update_v2",
            f"inno={inno}",
            f"url={url}",
            "--local",
        ]
    )


def agent_update(version, url):
    # make sure another instance of the update is not running
    # this function spawns 2 instances of itself so if more than 2 running,
    # don't continue as an update is already running
    count = 0
    for p in psutil.process_iter():
        try:
            with p.oneshot():
                if "win_agent.agent_update" in p.cmdline():
                    count += 1
        except Exception:
            continue

    if count > 2:
        return "already running"

    sleep(random.randint(1, 60))  # don't flood the rmm
    try:
        r = requests.get(url, stream=True, timeout=600)
    except Exception:
        return "failed"

    if r.status_code != 200:
        return "failed"

    exe = os.path.join(TEMP_DIR, f"winagent-v{version}.exe")

    with open(exe, "wb") as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    del r

    services = ("tacticalagent", "checkrunner")

    for svc in services:
        subprocess.run([NSSM, "stop", svc], timeout=120)

    sleep(10)
    r = subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=300)
    sleep(30)

    for svc in services:
        subprocess.run([NSSM, "start", svc], timeout=120)

    return "ok"


def do_agent_update(version, url):
    return __salt__["cmd.run_bg"](
        [
            SALT_CALL,
            "win_agent.agent_update",
            f"version={version}",
            f"url={url}",
            "--local",
        ]
    )


class SystemDetail:
    def __init__(self):
        self.c = wmi.WMI()
        self.comp_sys_prod = self.c.Win32_ComputerSystemProduct()
        self.comp_sys = self.c.Win32_ComputerSystem()
        self.memory = self.c.Win32_PhysicalMemory()
        self.os = self.c.Win32_OperatingSystem()
        self.base_board = self.c.Win32_BaseBoard()
        self.bios = self.c.Win32_BIOS()
        self.disk = self.c.Win32_DiskDrive()
        self.network_adapter = self.c.Win32_NetworkAdapter()
        self.network_config = self.c.Win32_NetworkAdapterConfiguration()
        self.desktop_monitor = self.c.Win32_DesktopMonitor()
        self.cpu = self.c.Win32_Processor()
        self.usb = self.c.Win32_USBController()

    def get_all(self, obj):
        ret = []
        for i in obj:
            tmp = [
                {j: getattr(i, j)}
                for j in list(i.properties)
                if getattr(i, j) is not None
            ]
            ret.append(tmp)

        return ret


def system_info():
    info = SystemDetail()
    return {
        "comp_sys_prod": info.get_all(info.comp_sys_prod),
        "comp_sys": info.get_all(info.comp_sys),
        "mem": info.get_all(info.memory),
        "os": info.get_all(info.os),
        "base_board": info.get_all(info.base_board),
        "bios": info.get_all(info.bios),
        "disk": info.get_all(info.disk),
        "network_adapter": info.get_all(info.network_adapter),
        "network_config": info.get_all(info.network_config),
        "desktop_monitor": info.get_all(info.desktop_monitor),
        "cpu": info.get_all(info.cpu),
        "usb": info.get_all(info.usb),
    }


def local_sys_info():
    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "sysinfo"])


def get_procs():
    ret = []

    # setup
    for proc in psutil.process_iter():
        with proc.oneshot():
            proc.cpu_percent(interval=None)

    # need time for psutil to record cpu percent
    sleep(1)

    for c, proc in enumerate(psutil.process_iter(), 1):
        x = {}
        with proc.oneshot():
            if proc.pid == 0 or not proc.name():
                continue

            x["name"] = proc.name()
            x["cpu_percent"] = proc.cpu_percent(interval=None) / psutil.cpu_count()
            x["memory_percent"] = proc.memory_percent()
            x["pid"] = proc.pid
            x["ppid"] = proc.ppid()
            x["status"] = proc.status()
            x["username"] = proc.username()
            x["id"] = c

            ret.append(x)

    return ret


def _compress_json(j):
    return {
        "wineventlog": base64.b64encode(
            zlib.compress(json.dumps(j).encode("utf-8", errors="ignore"))
        ).decode("ascii", errors="ignore")
    }


def get_eventlog(logtype, last_n_days):

    start_time = datetime.datetime.now() - datetime.timedelta(days=last_n_days)
    flags = win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ

    status_dict = {
        win32con.EVENTLOG_AUDIT_FAILURE: "AUDIT_FAILURE",
        win32con.EVENTLOG_AUDIT_SUCCESS: "AUDIT_SUCCESS",
        win32con.EVENTLOG_INFORMATION_TYPE: "INFO",
        win32con.EVENTLOG_WARNING_TYPE: "WARNING",
        win32con.EVENTLOG_ERROR_TYPE: "ERROR",
        0: "INFO",
    }

    computer = "localhost"
    hand = win32evtlog.OpenEventLog(computer, logtype)
    total = win32evtlog.GetNumberOfEventLogRecords(hand)
    log = []
    uid = 0
    done = False

    try:
        while 1:
            events = win32evtlog.ReadEventLog(hand, flags, 0)
            for ev_obj in events:

                uid += 1
                # return once total number of events reach or we'll be stuck in an infinite loop
                if uid >= total:
                    done = True
                    break

                the_time = ev_obj.TimeGenerated.Format()
                time_obj = datetime.datetime.strptime(the_time, "%c")
                if time_obj < start_time:
                    done = True
                    break

                computer = str(ev_obj.ComputerName)
                src = str(ev_obj.SourceName)
                evt_type = str(status_dict[ev_obj.EventType])
                evt_id = str(winerror.HRESULT_CODE(ev_obj.EventID))
                evt_category = str(ev_obj.EventCategory)
                record = str(ev_obj.RecordNumber)
                msg = (
                    str(win32evtlogutil.SafeFormatMessage(ev_obj, logtype))
                    .replace("<", "")
                    .replace(">", "")
                )

                event_dict = {
                    "computer": computer,
                    "source": src,
                    "eventType": evt_type,
                    "eventID": evt_id,
                    "eventCategory": evt_category,
                    "message": msg,
                    "time": the_time,
                    "record": record,
                    "uid": uid,
                }

                log.append(event_dict)

            if done:
                break

    except Exception:
        pass

    win32evtlog.CloseEventLog(hand)
    return _compress_json(log)
```
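The deleted module shipped event logs as base64-wrapped, zlib-compressed JSON via `_compress_json`. A minimal sketch of the inverse transformation a consumer of that payload would apply, derived directly from the format shown above; `decompress_eventlog` is a hypothetical helper name, not repo code:

```python
# Illustrative inverse of _compress_json: base64 text wrapping a
# zlib-compressed JSON blob under the "wineventlog" key.
import base64
import json
import zlib

def decompress_eventlog(payload: dict) -> list:
    raw = base64.b64decode(payload["wineventlog"])
    return json.loads(zlib.decompress(raw).decode("utf-8"))

# round-trip check against the exact format _compress_json emits
events = [{"source": "Service Control Manager", "eventType": "INFO"}]
packed = {
    "wineventlog": base64.b64encode(
        zlib.compress(json.dumps(events).encode("utf-8"))
    ).decode("ascii")
}
assert decompress_eventlog(packed) == events
```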
```diff
@@ -20,6 +20,5 @@ omit =
     */urls.py
     */tests.py
     */test.py
     api/*.py
     checks/utils.py
```
```diff
@@ -6,28 +6,28 @@ from django.db import migrations, models

 class Migration(migrations.Migration):

     dependencies = [
-        ('accounts', '0002_auto_20200810_0544'),
+        ("accounts", "0002_auto_20200810_0544"),
     ]

     operations = [
         migrations.AddField(
-            model_name='user',
-            name='created_by',
+            model_name="user",
+            name="created_by",
             field=models.CharField(blank=True, max_length=100, null=True),
         ),
         migrations.AddField(
-            model_name='user',
-            name='created_time',
+            model_name="user",
+            name="created_time",
             field=models.DateTimeField(auto_now_add=True, null=True),
         ),
         migrations.AddField(
-            model_name='user',
-            name='modified_by',
+            model_name="user",
+            name="modified_by",
             field=models.CharField(blank=True, max_length=100, null=True),
         ),
         migrations.AddField(
-            model_name='user',
-            name='modified_time',
+            model_name="user",
+            name="modified_time",
             field=models.DateTimeField(auto_now=True, null=True),
         ),
     ]
```
```diff
@@ -6,24 +6,24 @@ from django.db import migrations

 class Migration(migrations.Migration):

     dependencies = [
-        ('accounts', '0003_auto_20200922_1344'),
+        ("accounts", "0003_auto_20200922_1344"),
     ]

     operations = [
         migrations.RemoveField(
-            model_name='user',
-            name='created_by',
+            model_name="user",
+            name="created_by",
         ),
         migrations.RemoveField(
-            model_name='user',
-            name='created_time',
+            model_name="user",
+            name="created_time",
         ),
         migrations.RemoveField(
-            model_name='user',
-            name='modified_by',
+            model_name="user",
+            name="modified_by",
         ),
         migrations.RemoveField(
-            model_name='user',
-            name='modified_time',
+            model_name="user",
+            name="modified_time",
         ),
     ]
```
```diff
@@ -6,28 +6,28 @@ from django.db import migrations, models

 class Migration(migrations.Migration):

     dependencies = [
-        ('accounts', '0004_auto_20201002_1257'),
+        ("accounts", "0004_auto_20201002_1257"),
     ]

     operations = [
         migrations.AddField(
-            model_name='user',
-            name='created_by',
+            model_name="user",
+            name="created_by",
             field=models.CharField(blank=True, max_length=100, null=True),
         ),
         migrations.AddField(
-            model_name='user',
-            name='created_time',
+            model_name="user",
+            name="created_time",
             field=models.DateTimeField(auto_now_add=True, null=True),
         ),
         migrations.AddField(
-            model_name='user',
-            name='modified_by',
+            model_name="user",
+            name="modified_by",
             field=models.CharField(blank=True, max_length=100, null=True),
         ),
         migrations.AddField(
-            model_name='user',
-            name='modified_time',
+            model_name="user",
+            name="modified_time",
             field=models.DateTimeField(auto_now=True, null=True),
         ),
     ]
```
```diff
@@ -6,13 +6,13 @@ from django.db import migrations, models

 class Migration(migrations.Migration):

     dependencies = [
-        ('accounts', '0007_update_agent_primary_key'),
+        ("accounts", "0007_update_agent_primary_key"),
     ]

     operations = [
         migrations.AddField(
-            model_name='user',
-            name='dark_mode',
+            model_name="user",
+            name="dark_mode",
             field=models.BooleanField(default=True),
         ),
     ]
```
```diff
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.4 on 2020-12-10 17:00
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("accounts", "0008_user_dark_mode"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="user",
+            name="show_community_scripts",
+            field=models.BooleanField(default=True),
+        ),
+    ]
```
```diff
@@ -0,0 +1,26 @@
+# Generated by Django 3.1.4 on 2021-01-14 01:23
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("accounts", "0009_user_show_community_scripts"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="user",
+            name="agent_dblclick_action",
+            field=models.CharField(
+                choices=[
+                    ("editagent", "Edit Agent"),
+                    ("takecontrol", "Take Control"),
+                    ("remotebg", "Remote Background"),
+                ],
+                default="editagent",
+                max_length=50,
+            ),
+        ),
+    ]
```
```diff
@@ -0,0 +1,26 @@
+# Generated by Django 3.1.5 on 2021-01-18 09:40
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("accounts", "0010_user_agent_dblclick_action"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="user",
+            name="default_agent_tbl_tab",
+            field=models.CharField(
+                choices=[
+                    ("server", "Servers"),
+                    ("workstation", "Workstations"),
+                    ("mixed", "Mixed"),
+                ],
+                default="server",
+                max_length=50,
+            ),
+        ),
+    ]
```
```diff
@@ -3,11 +3,30 @@ from django.contrib.auth.models import AbstractUser

 from logs.models import BaseAuditModel

+AGENT_DBLCLICK_CHOICES = [
+    ("editagent", "Edit Agent"),
+    ("takecontrol", "Take Control"),
+    ("remotebg", "Remote Background"),
+]
+
+AGENT_TBL_TAB_CHOICES = [
+    ("server", "Servers"),
+    ("workstation", "Workstations"),
+    ("mixed", "Mixed"),
+]
+
+
 class User(AbstractUser, BaseAuditModel):
     is_active = models.BooleanField(default=True)
     totp_key = models.CharField(max_length=50, null=True, blank=True)
     dark_mode = models.BooleanField(default=True)
+    show_community_scripts = models.BooleanField(default=True)
+    agent_dblclick_action = models.CharField(
+        max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent"
+    )
+    default_agent_tbl_tab = models.CharField(
+        max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server"
+    )

     agent = models.OneToOneField(
         "agents.Agent",
```
```diff
@@ -155,6 +155,33 @@ class GetUpdateDeleteUser(TacticalTestCase):

         self.check_not_authenticated("put", url)

+    @override_settings(ROOT_USER="john")
+    def test_put_root_user(self):
+        url = f"/accounts/{self.john.pk}/users/"
+        data = {
+            "id": self.john.pk,
+            "username": "john",
+            "email": "johndoe@xlawgaming.com",
+            "first_name": "John",
+            "last_name": "Doe",
+        }
+        r = self.client.put(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
+    @override_settings(ROOT_USER="john")
+    def test_put_not_root_user(self):
+        url = f"/accounts/{self.john.pk}/users/"
+        data = {
+            "id": self.john.pk,
+            "username": "john",
+            "email": "johndoe@xlawgaming.com",
+            "first_name": "John",
+            "last_name": "Doe",
+        }
+        self.client.force_authenticate(user=self.alice)
+        r = self.client.put(url, data, format="json")
+        self.assertEqual(r.status_code, 400)
+
     def test_delete(self):
         url = f"/accounts/{self.john.pk}/users/"
         r = self.client.delete(url)
@@ -166,6 +193,19 @@ class GetUpdateDeleteUser(TacticalTestCase):

         self.check_not_authenticated("delete", url)

+    @override_settings(ROOT_USER="john")
+    def test_delete_root_user(self):
+        url = f"/accounts/{self.john.pk}/users/"
+        r = self.client.delete(url)
+        self.assertEqual(r.status_code, 200)
+
+    @override_settings(ROOT_USER="john")
+    def test_delete_non_root_user(self):
+        url = f"/accounts/{self.john.pk}/users/"
+        self.client.force_authenticate(user=self.alice)
+        r = self.client.delete(url)
+        self.assertEqual(r.status_code, 400)
+

 class TestUserAction(TacticalTestCase):
     def setUp(self):
@@ -184,6 +224,21 @@ class TestUserAction(TacticalTestCase):

         self.check_not_authenticated("post", url)

+    @override_settings(ROOT_USER="john")
+    def test_post_root_user(self):
+        url = "/accounts/users/reset/"
+        data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
+        r = self.client.post(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
+    @override_settings(ROOT_USER="john")
+    def test_post_non_root_user(self):
+        url = "/accounts/users/reset/"
+        data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
+        self.client.force_authenticate(user=self.alice)
+        r = self.client.post(url, data, format="json")
+        self.assertEqual(r.status_code, 400)
+
     def test_put(self):
         url = "/accounts/users/reset/"
         data = {"id": self.john.pk}
@@ -195,12 +250,42 @@ class TestUserAction(TacticalTestCase):

         self.check_not_authenticated("put", url)

-    def test_darkmode(self):
+    @override_settings(ROOT_USER="john")
+    def test_put_root_user(self):
+        url = "/accounts/users/reset/"
+        data = {"id": self.john.pk}
+        r = self.client.put(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        user = User.objects.get(pk=self.john.pk)
+        self.assertEqual(user.totp_key, "")
+
+    @override_settings(ROOT_USER="john")
+    def test_put_non_root_user(self):
+        url = "/accounts/users/reset/"
+        data = {"id": self.john.pk}
+        self.client.force_authenticate(user=self.alice)
+        r = self.client.put(url, data, format="json")
+        self.assertEqual(r.status_code, 400)
+
+    def test_user_ui(self):
         url = "/accounts/users/ui/"
         data = {"dark_mode": False}
         r = self.client.patch(url, data, format="json")
         self.assertEqual(r.status_code, 200)

+        data = {"show_community_scripts": True}
+        r = self.client.patch(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        data = {
+            "userui": True,
+            "agent_dblclick_action": "editagent",
+            "default_agent_tbl_tab": "mixed",
+        }
+        r = self.client.patch(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
         self.check_not_authenticated("patch", url)
```
```diff
@@ -60,7 +60,7 @@ class LoginView(KnoxLoginView):

         if settings.DEBUG and token == "sekret":
             valid = True
-        elif totp.verify(token, valid_window=1):
+        elif totp.verify(token, valid_window=10):
             valid = True

         if valid:
@@ -108,6 +108,13 @@ class GetUpdateDeleteUser(APIView):
     def put(self, request, pk):
         user = get_object_or_404(User, pk=pk)

+        if (
+            hasattr(settings, "ROOT_USER")
+            and request.user != user
+            and user.username == settings.ROOT_USER
+        ):
+            return notify_error("The root user cannot be modified from the UI")
+
         serializer = UserSerializer(instance=user, data=request.data, partial=True)
         serializer.is_valid(raise_exception=True)
         serializer.save()
@@ -115,7 +122,15 @@ class GetUpdateDeleteUser(APIView):
         return Response("ok")

     def delete(self, request, pk):
-        get_object_or_404(User, pk=pk).delete()
+        user = get_object_or_404(User, pk=pk)
+        if (
+            hasattr(settings, "ROOT_USER")
+            and request.user != user
+            and user.username == settings.ROOT_USER
+        ):
+            return notify_error("The root user cannot be deleted from the UI")
+
+        user.delete()

         return Response("ok")

@@ -124,8 +139,14 @@ class UserActions(APIView):

     # reset password
     def post(self, request):
-
         user = get_object_or_404(User, pk=request.data["id"])
+        if (
+            hasattr(settings, "ROOT_USER")
+            and request.user != user
+            and user.username == settings.ROOT_USER
+        ):
+            return notify_error("The root user cannot be modified from the UI")
+
         user.set_password(request.data["password"])
         user.save()

@@ -133,8 +154,14 @@ class UserActions(APIView):

     # reset two factor token
     def put(self, request):
-
         user = get_object_or_404(User, pk=request.data["id"])
+        if (
+            hasattr(settings, "ROOT_USER")
+            and request.user != user
+            and user.username == settings.ROOT_USER
+        ):
+            return notify_error("The root user cannot be modified from the UI")
+
         user.totp_key = ""
         user.save()

@@ -161,6 +188,18 @@ class TOTPSetup(APIView):
 class UserUI(APIView):
     def patch(self, request):
         user = request.user
-        user.dark_mode = request.data["dark_mode"]
-        user.save(update_fields=["dark_mode"])
-        return Response("ok")
+
+        if "dark_mode" in request.data.keys():
+            user.dark_mode = request.data["dark_mode"]
+            user.save(update_fields=["dark_mode"])
+
+        if "show_community_scripts" in request.data.keys():
+            user.show_community_scripts = request.data["show_community_scripts"]
+            user.save(update_fields=["show_community_scripts"])
+
+        if "userui" in request.data.keys():
+            user.agent_dblclick_action = request.data["agent_dblclick_action"]
+            user.default_agent_tbl_tab = request.data["default_agent_tbl_tab"]
+            user.save(update_fields=["agent_dblclick_action", "default_agent_tbl_tab"])
+
+        return Response("ok")
```
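The only behavioural change in `LoginView` is widening `valid_window` from 1 to 10. In pyotp (pinned at 2.4.1 in the dev requirements), `valid_window=N` accepts a code that matches any of the N 30-second time steps on either side of the current one, so the change tolerates roughly five minutes of clock drift instead of thirty seconds. A minimal sketch of that behaviour; the secret here is freshly generated, not repo data:

```python
# Demonstrates what totp.verify(token, valid_window=N) checks: the given
# code against the current step plus N steps before and after it.
import pyotp

totp = pyotp.TOTP(pyotp.random_base32())
code = totp.now()

assert totp.verify(code, valid_window=1)   # old behaviour: ~+/-30 seconds
assert totp.verify(code, valid_window=10)  # new behaviour: ~+/-5 minutes
```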
```diff
@@ -26,7 +26,7 @@ def get_wmi_data():

 agent = Recipe(
     Agent,
     hostname="DESKTOP-TEST123",
-    version="1.1.0",
+    version="1.3.0",
     monitoring_type=cycle(["workstation", "server"]),
     salt_id=generate_agent_id("DESKTOP-TEST123"),
     agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",
```
@@ -7,14 +7,20 @@ import django.db.models.deletion
class Migration(migrations.Migration):

    dependencies = [
        ('clients', '0006_deployment'),
        ('agents', '0020_auto_20201025_2129'),
        ("clients", "0006_deployment"),
        ("agents", "0020_auto_20201025_2129"),
    ]

    operations = [
        migrations.AddField(
            model_name='agent',
            name='site_link',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='agents', to='clients.site'),
            model_name="agent",
            name="site_link",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="agents",
                to="clients.site",
            ),
        ),
    ]
@@ -6,16 +6,16 @@ from django.db import migrations
class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0022_update_site_primary_key'),
        ("agents", "0022_update_site_primary_key"),
    ]

    operations = [
        migrations.RemoveField(
            model_name='agent',
            name='client',
            model_name="agent",
            name="client",
        ),
        migrations.RemoveField(
            model_name='agent',
            name='site',
            model_name="agent",
            name="site",
        ),
    ]
@@ -6,13 +6,13 @@ from django.db import migrations
class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0023_auto_20201101_2312'),
        ("agents", "0023_auto_20201101_2312"),
    ]

    operations = [
        migrations.RenameField(
            model_name='agent',
            old_name='site_link',
            new_name='site',
            model_name="agent",
            old_name="site_link",
            new_name="site",
        ),
    ]
@@ -6,13 +6,22 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0024_auto_20201101_2319'),
        ("agents", "0024_auto_20201101_2319"),
    ]

    operations = [
        migrations.AlterField(
            model_name='recoveryaction',
            name='mode',
            field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC')], default='mesh', max_length=50),
            model_name="recoveryaction",
            name="mode",
            field=models.CharField(
                choices=[
                    ("salt", "Salt"),
                    ("mesh", "Mesh"),
                    ("command", "Command"),
                    ("rpc", "Nats RPC"),
                ],
                default="mesh",
                max_length=50,
            ),
        ),
    ]
@@ -6,13 +6,23 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0025_auto_20201122_0407'),
        ("agents", "0025_auto_20201122_0407"),
    ]

    operations = [
        migrations.AlterField(
            model_name='recoveryaction',
            name='mode',
            field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC'), ('checkrunner', 'Checkrunner')], default='mesh', max_length=50),
            model_name="recoveryaction",
            name="mode",
            field=models.CharField(
                choices=[
                    ("salt", "Salt"),
                    ("mesh", "Mesh"),
                    ("command", "Command"),
                    ("rpc", "Nats RPC"),
                    ("checkrunner", "Checkrunner"),
                ],
                default="mesh",
                max_length=50,
            ),
        ),
    ]
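Both migrations above only touch choices on a CharField. Django records them to keep migration state consistent, but for most backends they are effectively no-ops at the SQL level, since choices are enforced during validation rather than in the schema. A trimmed sketch of the resulting field (not the full model):

from django.db import models

MODES = [
    ("salt", "Salt"),
    ("mesh", "Mesh"),
    ("command", "Command"),
    ("rpc", "Nats RPC"),
    ("checkrunner", "Checkrunner"),
]
mode = models.CharField(choices=MODES, default="mesh", max_length=50)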
@@ -1,5 +1,3 @@
import requests
import datetime as dt
import time
import base64
from Crypto.Cipher import AES
@@ -8,10 +6,9 @@ from Crypto.Hash import SHA3_384
from Crypto.Util.Padding import pad
import validators
import msgpack
import random
import re
import string
from collections import Counter
from typing import List
from loguru import logger
from packaging import version as pyver
from distutils.version import LooseVersion
@@ -89,6 +86,10 @@ class Agent(BaseAuditModel):
    def has_nats(self):
        return pyver.parse(self.version) >= pyver.parse("1.1.0")

    @property
    def has_gotasks(self):
        return pyver.parse(self.version) >= pyver.parse("1.1.1")

    @property
    def timezone(self):
        # return the default timezone unless the timezone is explicitly set per agent
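has_gotasks follows the same version-gate pattern as has_nats: parse both versions before comparing, so "1.10.0" correctly sorts above "1.2.0" (a plain string comparison would not). A standalone sketch of the pattern:

from packaging import version as pyver

def has_gotasks(agent_version: str) -> bool:
    # gotasks support landed in agent 1.1.1 per the hunk above
    return pyver.parse(agent_version) >= pyver.parse("1.1.1")

assert has_gotasks("1.10.0")
assert not has_gotasks("1.1.0")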
@@ -116,14 +117,6 @@ class Agent(BaseAuditModel):
            return settings.DL_32
        return None

    @property
    def winsalt_dl(self):
        if self.arch == "64":
            return settings.SALT_64
        elif self.arch == "32":
            return settings.SALT_32
        return None

    @property
    def win_inno_exe(self):
        if self.arch == "64":
@@ -163,13 +156,11 @@ class Agent(BaseAuditModel):
            elif i.status == "failing":
                failing += 1

        has_failing_checks = True if failing > 0 else False

        ret = {
            "total": total,
            "passing": passing,
            "failing": failing,
            "has_failing_checks": has_failing_checks,
            "has_failing_checks": failing > 0,
        }
        return ret
@@ -383,14 +374,15 @@ class Agent(BaseAuditModel):

        return patch_policy

    # clear is used to delete managed policy checks from agent
    # parent_checks specifies a list of checks to delete from agent with matching parent_check field
    def generate_checks_from_policies(self, clear=False):
        from automation.models import Policy
    def get_approved_update_guids(self) -> List[str]:
        return list(
            self.winupdates.filter(action="approve", installed=False).values_list(
                "guid", flat=True
            )
        )

        # Clear agent checks managed by policy
        if clear:
            self.agentchecks.filter(managed_by_policy=True).delete()
    def generate_checks_from_policies(self):
        from automation.models import Policy

        # Clear agent checks that have overriden_by_policy set
        self.agentchecks.update(overriden_by_policy=False)
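get_approved_update_guids uses values_list("guid", flat=True), which yields bare strings rather than 1-tuples; wrapping the queryset in list() forces evaluation so the result serializes cleanly. A hedged usage sketch, assuming an Agent instance named agent:

# guids for all approved-but-not-installed updates, e.g. to hand to the agent
guids = agent.get_approved_update_guids()  # -> ["guid-1", "guid-2", ...]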
@@ -398,17 +390,9 @@ class Agent(BaseAuditModel):
        # Generate checks based on policies
        Policy.generate_policy_checks(self)

    # clear is used to delete managed policy tasks from agent
    # parent_tasks specifies a list of tasks to delete from agent with matching parent_task field
    def generate_tasks_from_policies(self, clear=False):
        from autotasks.tasks import delete_win_task_schedule
    def generate_tasks_from_policies(self):
        from automation.models import Policy

        # Clear agent tasks managed by policy
        if clear:
            for task in self.autotasks.filter(managed_by_policy=True):
                delete_win_task_schedule.delay(task.pk)

        # Generate tasks based on policies
        Policy.generate_policy_tasks(self)
@@ -467,77 +451,6 @@ class Agent(BaseAuditModel):
        await nc.flush()
        await nc.close()

    def salt_api_cmd(self, **kwargs):

        # salt should always timeout first before the requests' timeout
        try:
            timeout = kwargs["timeout"]
        except KeyError:
            # default timeout
            timeout = 15
            salt_timeout = 12
        else:
            if timeout < 8:
                timeout = 8
                salt_timeout = 5
            else:
                salt_timeout = timeout - 3

        json = {
            "client": "local",
            "tgt": self.salt_id,
            "fun": kwargs["func"],
            "timeout": salt_timeout,
            "username": settings.SALT_USERNAME,
            "password": settings.SALT_PASSWORD,
            "eauth": "pam",
        }

        if "arg" in kwargs:
            json.update({"arg": kwargs["arg"]})
        if "kwargs" in kwargs:
            json.update({"kwarg": kwargs["kwargs"]})

        try:
            resp = requests.post(
                f"http://{settings.SALT_HOST}:8123/run",
                json=[json],
                timeout=timeout,
            )
        except Exception:
            return "timeout"

        try:
            ret = resp.json()["return"][0][self.salt_id]
        except Exception as e:
            logger.error(f"{self.salt_id}: {e}")
            return "error"
        else:
            return ret

    def salt_api_async(self, **kwargs):

        json = {
            "client": "local_async",
            "tgt": self.salt_id,
            "fun": kwargs["func"],
            "username": settings.SALT_USERNAME,
            "password": settings.SALT_PASSWORD,
            "eauth": "pam",
        }

        if "arg" in kwargs:
            json.update({"arg": kwargs["arg"]})
        if "kwargs" in kwargs:
            json.update({"kwarg": kwargs["kwargs"]})

        try:
            resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
        except Exception:
            return "timeout"

        return resp

    @staticmethod
    def serialize(agent):
        # serializes the agent and returns json
@@ -545,89 +458,9 @@ class Agent(BaseAuditModel):

        ret = AgentEditSerializer(agent).data
        del ret["all_timezones"]
        del ret["client"]
        return ret

    @staticmethod
    def salt_batch_async(**kwargs):
        assert isinstance(kwargs["minions"], list)

        json = {
            "client": "local_async",
            "tgt_type": "list",
            "tgt": kwargs["minions"],
            "fun": kwargs["func"],
            "username": settings.SALT_USERNAME,
            "password": settings.SALT_PASSWORD,
            "eauth": "pam",
        }

        if "arg" in kwargs:
            json.update({"arg": kwargs["arg"]})
        if "kwargs" in kwargs:
            json.update({"kwarg": kwargs["kwargs"]})

        try:
            resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
        except Exception:
            return "timeout"

        return resp

    def schedule_reboot(self, obj):

        start_date = dt.datetime.strftime(obj, "%Y-%m-%d")
        start_time = dt.datetime.strftime(obj, "%H:%M")

        # let windows task scheduler automatically delete the task after it runs
        end_obj = obj + dt.timedelta(minutes=15)
        end_date = dt.datetime.strftime(end_obj, "%Y-%m-%d")
        end_time = dt.datetime.strftime(end_obj, "%H:%M")

        task_name = "TacticalRMM_SchedReboot_" + "".join(
            random.choice(string.ascii_letters) for _ in range(10)
        )

        r = self.salt_api_cmd(
            timeout=15,
            func="task.create_task",
            arg=[
                f"name={task_name}",
                "force=True",
                "action_type=Execute",
                'cmd="C:\\Windows\\System32\\shutdown.exe"',
                'arguments="/r /t 5 /f"',
                "trigger_type=Once",
                f'start_date="{start_date}"',
                f'start_time="{start_time}"',
                f'end_date="{end_date}"',
                f'end_time="{end_time}"',
                "ac_only=False",
                "stop_if_on_batteries=False",
                "delete_after=Immediately",
            ],
        )

        if r == "error" or (isinstance(r, bool) and not r):
            return "failed"
        elif r == "timeout":
            return "timeout"
        elif isinstance(r, bool) and r:
            from logs.models import PendingAction

            details = {
                "taskname": task_name,
                "time": str(obj),
            }
            PendingAction(agent=self, action_type="schedreboot", details=details).save()

            nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
            return {"msg": {"time": nice_time, "agent": self.hostname}}
        else:
            return "failed"

    def not_supported(self, version_added):
        return pyver.parse(self.version) < pyver.parse(version_added)

    def delete_superseded_updates(self):
        try:
            pks = []  # list of pks to delete
@@ -680,6 +513,13 @@ class Agent(BaseAuditModel):
            elif action.details["action"] == "taskdelete":
                delete_win_task_schedule.delay(task_id, pending_action=action.id)

    # for clearing duplicate pending actions on agent
    def remove_matching_pending_task_actions(self, task_id):
        # remove any other pending actions on agent with same task_id
        for action in self.pendingactions.exclude(status="completed"):
            if action.details["task_id"] == task_id:
                action.delete()


class AgentOutage(models.Model):
    agent = models.ForeignKey(
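remove_matching_pending_task_actions has to filter in Python because task_id lives inside the JSON details field. One hedged hardening note: details.get("task_id") would skip pending actions whose details lack that key instead of raising a KeyError. A sketch of that variant, assuming an Agent instance named agent:

for action in agent.pendingactions.exclude(status="completed"):
    if action.details.get("task_id") == task_id:  # .get() tolerates missing keys
        action.delete()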
@@ -34,21 +34,44 @@ class AgentSerializer(serializers.ModelSerializer):
    ]


class AgentOverdueActionSerializer(serializers.ModelSerializer):
    class Meta:
        model = Agent
        fields = ["pk", "overdue_email_alert", "overdue_text_alert"]


class AgentTableSerializer(serializers.ModelSerializer):
    patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
    pending_actions = serializers.SerializerMethodField()
    status = serializers.ReadOnlyField()
    checks = serializers.ReadOnlyField()
    last_seen = serializers.SerializerMethodField()
    client_name = serializers.ReadOnlyField(source="client.name")
    site_name = serializers.ReadOnlyField(source="site.name")
    logged_username = serializers.SerializerMethodField()
    italic = serializers.SerializerMethodField()

    def get_last_seen(self, obj):
    def get_pending_actions(self, obj):
        return obj.pendingactions.filter(status="pending").count()

    def get_last_seen(self, obj) -> str:
        if obj.time_zone is not None:
            agent_tz = pytz.timezone(obj.time_zone)
        else:
            agent_tz = self.context["default_tz"]

        return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M:%S")
        return obj.last_seen.astimezone(agent_tz).timestamp()

    def get_logged_username(self, obj) -> str:
        if obj.logged_in_username == "None" and obj.status == "online":
            return obj.last_logged_in_user
        elif obj.logged_in_username != "None":
            return obj.logged_in_username
        else:
            return "-"

    def get_italic(self, obj) -> bool:
        return obj.logged_in_username == "None" and obj.status == "online"

    class Meta:
        model = Agent
@@ -62,15 +85,16 @@ class AgentTableSerializer(serializers.ModelSerializer):
            "description",
            "needs_reboot",
            "patches_pending",
            "pending_actions",
            "status",
            "overdue_text_alert",
            "overdue_email_alert",
            "last_seen",
            "boot_time",
            "checks",
            "logged_in_username",
            "last_logged_in_user",
            "maintenance_mode",
            "logged_username",
            "italic",
        ]
        depth = 2

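get_last_seen now returns an epoch timestamp instead of a pre-formatted string, leaving formatting and locale to the frontend; note the lingering -> str annotation no longer matches the float that .timestamp() returns. A consumer can recover a datetime like this:

from datetime import datetime, timezone

ts = 1606780800.0  # example value of the serialized last_seen
last_seen = datetime.fromtimestamp(ts, tz=timezone.utc)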
@@ -2,12 +2,11 @@ import asyncio
from loguru import logger
from time import sleep
import random
import requests
from packaging import version as pyver

from typing import List

from django.conf import settings

from scripts.models import Script

from tacticalrmm.celery import app
from agents.models import Agent, AgentOutage
@@ -16,280 +15,102 @@ from logs.models import PendingAction

logger.configure(**settings.LOG_CONFIG)

OLD_64_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2.exe"
OLD_32_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2-x86.exe"

def agent_update(pk: int) -> str:
    agent = Agent.objects.get(pk=pk)
    # skip if we can't determine the arch
    if agent.arch is None:
        logger.warning(f"Unable to determine arch on {agent.hostname}. Skipping.")
        return "noarch"

    # removed sqlite in 1.4.0 to get rid of cgo dependency
    # 1.3.0 has migration func to move from sqlite to win registry, so force an upgrade to 1.3.0 if old agent
    if pyver.parse(agent.version) >= pyver.parse("1.3.0"):
        version = settings.LATEST_AGENT_VER
        url = agent.winagent_dl
        inno = agent.win_inno_exe
    else:
        version = "1.3.0"
        inno = (
            "winagent-v1.3.0.exe" if agent.arch == "64" else "winagent-v1.3.0-x86.exe"
        )
        url = f"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/{inno}"

    if agent.has_nats:
        if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
            if agent.pendingactions.filter(
                action_type="agentupdate", status="pending"
            ).exists():
                action = agent.pendingactions.filter(
                    action_type="agentupdate", status="pending"
                ).last()
                if pyver.parse(action.details["version"]) < pyver.parse(version):
                    action.delete()
                else:
                    return "pending"

            PendingAction.objects.create(
                agent=agent,
                action_type="agentupdate",
                details={
                    "url": url,
                    "version": version,
                    "inno": inno,
                },
            )
        else:
            nats_data = {
                "func": "agentupdate",
                "payload": {
                    "url": url,
                    "version": version,
                    "inno": inno,
                },
            }
            asyncio.run(agent.nats_cmd(nats_data, wait=False))

        return "created"
    else:
        logger.warning(
            f"{agent.hostname} v{agent.version} is running an unsupported version. Refusing to update."
        )

        return "not supported"

@app.task
def send_agent_update_task(pks, version):
    assert isinstance(pks, list)

def send_agent_update_task(pks: List[int], version: str) -> None:
    q = Agent.objects.filter(pk__in=pks)
    agents = [i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)]

    agents: List[int] = [
        i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)
    ]
    chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))

    for chunk in chunks:
        for pk in chunk:
            agent = Agent.objects.get(pk=pk)

            # skip if we can't determine the arch
            if agent.arch is None:
                logger.warning(
                    f"Unable to determine arch on {agent.salt_id}. Skipping."
                )
                continue

            # golang agent only backwards compatible with py agent 0.11.2
            # force an upgrade to the latest python agent if version < 0.11.2
            if pyver.parse(agent.version) < pyver.parse("0.11.2"):
                url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT
                inno = (
                    "winagent-v0.11.2.exe"
                    if agent.arch == "64"
                    else "winagent-v0.11.2-x86.exe"
                )
            else:
                url = agent.winagent_dl
                inno = agent.win_inno_exe
            logger.info(
                f"Updating {agent.salt_id} current version {agent.version} using {inno}"
            )

            if agent.has_nats:
                if agent.pendingactions.filter(
                    action_type="agentupdate", status="pending"
                ).exists():
                    continue

                PendingAction.objects.create(
                    agent=agent,
                    action_type="agentupdate",
                    details={
                        "url": agent.winagent_dl,
                        "version": settings.LATEST_AGENT_VER,
                        "inno": agent.win_inno_exe,
                    },
                )
            # TODO
            # Salt is deprecated, remove this once salt is gone
            else:
                r = agent.salt_api_async(
                    func="win_agent.do_agent_update_v2",
                    kwargs={
                        "inno": inno,
                        "url": url,
                    },
                )
            sleep(10)
            agent_update(pk)
            sleep(0.05)
        sleep(4)

@app.task
def auto_self_agent_update_task():
def auto_self_agent_update_task() -> None:
    core = CoreSettings.objects.first()
    if not core.agent_auto_update:
        logger.info("Agent auto update is disabled. Skipping.")
        return

    q = Agent.objects.only("pk", "version")
    agents = [
    pks: List[int] = [
        i.pk
        for i in q
        if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
    ]
    logger.info(f"Updating {len(agents)}")

    chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))

    chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
    for chunk in chunks:
        for pk in chunk:
            agent = Agent.objects.get(pk=pk)

            # skip if we can't determine the arch
            if agent.arch is None:
                logger.warning(
                    f"Unable to determine arch on {agent.salt_id}. Skipping."
                )
                continue

            # golang agent only backwards compatible with py agent 0.11.2
            # force an upgrade to the latest python agent if version < 0.11.2
            if pyver.parse(agent.version) < pyver.parse("0.11.2"):
                url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT
                inno = (
                    "winagent-v0.11.2.exe"
                    if agent.arch == "64"
                    else "winagent-v0.11.2-x86.exe"
                )
            else:
                url = agent.winagent_dl
                inno = agent.win_inno_exe
            logger.info(
                f"Updating {agent.salt_id} current version {agent.version} using {inno}"
            )

            if agent.has_nats:
                if agent.pendingactions.filter(
                    action_type="agentupdate", status="pending"
                ).exists():
                    continue

                PendingAction.objects.create(
                    agent=agent,
                    action_type="agentupdate",
                    details={
                        "url": agent.winagent_dl,
                        "version": settings.LATEST_AGENT_VER,
                        "inno": agent.win_inno_exe,
                    },
                )
            # TODO
            # Salt is deprecated, remove this once salt is gone
            else:
                r = agent.salt_api_async(
                    func="win_agent.do_agent_update_v2",
                    kwargs={
                        "inno": inno,
                        "url": url,
                    },
                )
            sleep(10)

@app.task
def update_salt_minion_task():
    q = Agent.objects.all()
    agents = [
        i.pk
        for i in q
        if pyver.parse(i.version) >= pyver.parse("0.11.0")
        and pyver.parse(i.salt_ver) < pyver.parse(settings.LATEST_SALT_VER)
    ]

    chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))

    for chunk in chunks:
        for pk in chunk:
            agent = Agent.objects.get(pk=pk)
            r = agent.salt_api_async(func="win_agent.update_salt")
        sleep(20)


@app.task
def get_wmi_detail_task(pk):
    agent = Agent.objects.get(pk=pk)
    if agent.has_nats:
        asyncio.run(agent.nats_cmd({"func": "sysinfo"}, wait=False))
    else:
        agent.salt_api_async(timeout=30, func="win_agent.local_sys_info")

    return "ok"


@app.task
def sync_salt_modules_task(pk):
    agent = Agent.objects.get(pk=pk)
    r = agent.salt_api_cmd(timeout=35, func="saltutil.sync_modules")
    # successful sync if new/changed files: {'return': [{'MINION-15': ['modules.get_eventlog', 'modules.win_agent', 'etc...']}]}
    # successful sync with no new/changed files: {'return': [{'MINION-15': []}]}
    if r == "timeout" or r == "error":
        return f"Unable to sync modules {agent.salt_id}"

    return f"Successfully synced salt modules on {agent.hostname}"


@app.task
def batch_sync_modules_task():
    # sync modules, split into chunks of 50 agents to not overload salt
    agents = Agent.objects.all()
    online = [i.salt_id for i in agents]
    chunks = (online[i : i + 50] for i in range(0, len(online), 50))
    for chunk in chunks:
        Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
        sleep(10)


@app.task
def batch_sysinfo_task():
    # update system info using WMI
    agents = Agent.objects.all()

    agents_nats = [agent for agent in agents if agent.has_nats]
    minions = [
        agent.salt_id
        for agent in agents
        if not agent.has_nats and pyver.parse(agent.version) >= pyver.parse("0.11.0")
    ]

    if minions:
        Agent.salt_batch_async(minions=minions, func="win_agent.local_sys_info")

    for agent in agents_nats:
        asyncio.run(agent.nats_cmd({"func": "sysinfo"}, wait=False))

@app.task
def uninstall_agent_task(salt_id):
    attempts = 0
    error = False

    while 1:
        try:

            r = requests.post(
                f"http://{settings.SALT_HOST}:8123/run",
                json=[
                    {
                        "client": "local",
                        "tgt": salt_id,
                        "fun": "win_agent.uninstall_agent",
                        "timeout": 8,
                        "username": settings.SALT_USERNAME,
                        "password": settings.SALT_PASSWORD,
                        "eauth": "pam",
                    }
                ],
                timeout=10,
            )
            ret = r.json()["return"][0][salt_id]
        except Exception:
            attempts += 1
        else:
            if ret != "ok":
                attempts += 1
            else:
                attempts = 0

        if attempts >= 10:
            error = True
            break
        elif attempts == 0:
            break

    if error:
        logger.error(f"{salt_id} uninstall failed")
    else:
        logger.info(f"{salt_id} was successfully uninstalled")

    try:
        r = requests.post(
            f"http://{settings.SALT_HOST}:8123/run",
            json=[
                {
                    "client": "wheel",
                    "fun": "key.delete",
                    "match": salt_id,
                    "username": settings.SALT_USERNAME,
                    "password": settings.SALT_PASSWORD,
                    "eauth": "pam",
                }
            ],
            timeout=30,
        )
    except Exception:
        logger.error(f"{salt_id} unable to remove salt-key")

    return "ok"
            agent_update(pk)
            sleep(0.05)
        sleep(4)


@app.task
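All of the update tasks above batch agents with the same slicing-generator idiom, sleeping between chunks to avoid stampeding the backend. The idiom in isolation, as a self-contained sketch:

from typing import Iterator, List

def chunked(pks: List[int], size: int = 30) -> Iterator[List[int]]:
    # consecutive slices of at most `size` items; the final slice may be shorter
    return (pks[i : i + size] for i in range(0, len(pks), size))

assert list(chunked(list(range(7)), 3)) == [[0, 1, 2], [3, 4, 5], [6]]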
@@ -330,19 +151,104 @@ def agent_recovery_sms_task(pk):

@app.task
def agent_outages_task():
    agents = Agent.objects.only("pk")
    agents = Agent.objects.only(
        "pk", "last_seen", "overdue_time", "overdue_email_alert", "overdue_text_alert"
    )

    for agent in agents:
        if agent.status == "overdue":
            outages = AgentOutage.objects.filter(agent=agent)
            if outages and outages.last().is_active:
                continue
        if agent.overdue_email_alert or agent.overdue_text_alert:
            if agent.status == "overdue":
                outages = AgentOutage.objects.filter(agent=agent)
                if outages and outages.last().is_active:
                    continue

            outage = AgentOutage(agent=agent)
            outage.save()
                outage = AgentOutage(agent=agent)
                outage.save()

            if agent.overdue_email_alert and not agent.maintenance_mode:
                agent_outage_email_task.delay(pk=outage.pk)
                # add a null check history to allow gaps in graph
                for check in agent.agentchecks.all():
                    check.add_check_history(None)

            if agent.overdue_text_alert and not agent.maintenance_mode:
                agent_outage_sms_task.delay(pk=outage.pk)
                if agent.overdue_email_alert and not agent.maintenance_mode:
                    agent_outage_email_task.delay(pk=outage.pk)

                if agent.overdue_text_alert and not agent.maintenance_mode:
                    agent_outage_sms_task.delay(pk=outage.pk)


@app.task
def handle_agent_recovery_task(pk: int) -> None:
    sleep(10)
    from agents.models import RecoveryAction

    action = RecoveryAction.objects.get(pk=pk)
    if action.mode == "command":
        data = {"func": "recoverycmd", "recoverycommand": action.command}
    else:
        data = {"func": "recover", "payload": {"mode": action.mode}}

    asyncio.run(action.agent.nats_cmd(data, wait=False))

@app.task
def run_script_email_results_task(
    agentpk: int, scriptpk: int, nats_timeout: int, nats_data: dict, emails: List[str]
):
    agent = Agent.objects.get(pk=agentpk)
    script = Script.objects.get(pk=scriptpk)
    nats_data["func"] = "runscriptfull"
    r = asyncio.run(agent.nats_cmd(nats_data, timeout=nats_timeout))
    if r == "timeout":
        logger.error(f"{agent.hostname} timed out running script.")
        return

    CORE = CoreSettings.objects.first()
    subject = f"{agent.hostname} {script.name} Results"
    exec_time = "{:.4f}".format(r["execution_time"])
    body = (
        subject
        + f"\nReturn code: {r['retcode']}\nExecution time: {exec_time} seconds\nStdout: {r['stdout']}\nStderr: {r['stderr']}"
    )

    import smtplib
    from email.message import EmailMessage

    msg = EmailMessage()
    msg["Subject"] = subject
    msg["From"] = CORE.smtp_from_email

    if emails:
        msg["To"] = ", ".join(emails)
    else:
        msg["To"] = ", ".join(CORE.email_alert_recipients)

    msg.set_content(body)

    try:
        with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
            if CORE.smtp_requires_auth:
                server.ehlo()
                server.starttls()
                server.login(CORE.smtp_host_user, CORE.smtp_host_password)
                server.send_message(msg)
                server.quit()
            else:
                server.send_message(msg)
                server.quit()
    except Exception as e:
        logger.error(e)


@app.task
def remove_salt_task() -> None:
    if hasattr(settings, "KEEP_SALT") and settings.KEEP_SALT:
        return

    q = Agent.objects.only("pk", "version")
    agents = [i for i in q if pyver.parse(i.version) >= pyver.parse("1.3.0")]
    chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
    for chunk in chunks:
        for agent in chunk:
            asyncio.run(agent.nats_cmd({"func": "removesalt"}, wait=False))
            sleep(0.1)
        sleep(4)
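agent_outages_task now restricts its initial query with only() to the columns the loop actually reads, which matters for a task that scans every agent on a schedule; any deferred field touched later (maintenance_mode, for example) is lazily fetched per agent. A hedged illustration of the trade-off:

# fetches five columns per agent up front...
agents = Agent.objects.only(
    "pk", "last_seen", "overdue_time", "overdue_email_alert", "overdue_text_alert"
)
# ...but accessing a deferred field inside the loop costs one extra query per agent:
# agent.maintenance_mode  # deferred -> lazy fetch on first access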
@@ -5,23 +5,15 @@ from unittest.mock import patch
from model_bakery import baker
from itertools import cycle

from django.test import TestCase, override_settings
from django.conf import settings
from django.utils import timezone as djangotime
from logs.models import PendingAction

from tacticalrmm.test import TacticalTestCase
from .serializers import AgentSerializer
from winupdate.serializers import WinUpdatePolicySerializer
from .models import Agent
from .tasks import (
    auto_self_agent_update_task,
    update_salt_minion_task,
    get_wmi_detail_task,
    sync_salt_modules_task,
    batch_sync_modules_task,
    batch_sysinfo_task,
    OLD_64_PY_AGENT,
    OLD_32_PY_AGENT,
)
from winupdate.models import WinUpdatePolicy

@@ -33,7 +25,7 @@ class TestAgentViews(TacticalTestCase):
        client = baker.make("clients.Client", name="Google")
        site = baker.make("clients.Site", client=client, name="LA Office")
        self.agent = baker.make_recipe(
            "agents.online_agent", site=site, version="1.1.0"
            "agents.online_agent", site=site, version="1.1.1"
        )
        baker.make_recipe("winupdate.winupdate_policy", agent=self.agent)
@@ -112,9 +104,8 @@ class TestAgentViews(TacticalTestCase):
        self.check_not_authenticated("get", url)

    @patch("agents.models.Agent.nats_cmd")
    @patch("agents.tasks.uninstall_agent_task.delay")
    @patch("agents.views.reload_nats")
    def test_uninstall(self, reload_nats, mock_task, nats_cmd):
    def test_uninstall(self, reload_nats, nats_cmd):
        url = "/agents/uninstall/"
        data = {"pk": self.agent.pk}
@@ -123,13 +114,18 @@ class TestAgentViews(TacticalTestCase):

        nats_cmd.assert_called_with({"func": "uninstall"}, wait=False)
        reload_nats.assert_called_once()
        mock_task.assert_called_with(self.agent.salt_id)

        self.check_not_authenticated("delete", url)

    @patch("agents.models.Agent.nats_cmd")
    def test_get_processes(self, mock_ret):
        url = f"/agents/{self.agent.pk}/getprocs/"
        agent_old = baker.make_recipe("agents.online_agent", version="1.1.12")
        url_old = f"/agents/{agent_old.pk}/getprocs/"
        r = self.client.get(url_old)
        self.assertEqual(r.status_code, 400)

        agent = baker.make_recipe("agents.online_agent", version="1.2.0")
        url = f"/agents/{agent.pk}/getprocs/"

        with open(
            os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/procs.json")
@@ -139,9 +135,7 @@ class TestAgentViews(TacticalTestCase):
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        assert any(i["name"] == "Registry" for i in mock_ret.return_value)
        assert any(
            i["memory_percent"] == 0.004843281375620747 for i in mock_ret.return_value
        )
        assert any(i["membytes"] == 434655234324 for i in mock_ret.return_value)

        mock_ret.return_value = "timeout"
        r = self.client.get(url)
@@ -168,28 +162,54 @@ class TestAgentViews(TacticalTestCase):
        self.check_not_authenticated("get", url)

    @patch("agents.models.Agent.nats_cmd")
    def test_get_event_log(self, mock_ret):
        url = f"/agents/{self.agent.pk}/geteventlog/Application/30/"
    def test_get_event_log(self, nats_cmd):
        url = f"/agents/{self.agent.pk}/geteventlog/Application/22/"

        with open(
            os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/appeventlog.json")
        ) as f:
            mock_ret.return_value = json.load(f)
            nats_cmd.return_value = json.load(f)

        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        nats_cmd.assert_called_with(
            {
                "func": "eventlog",
                "timeout": 30,
                "payload": {
                    "logname": "Application",
                    "days": str(22),
                },
            },
            timeout=32,
        )

        mock_ret.return_value = "timeout"
        url = f"/agents/{self.agent.pk}/geteventlog/Security/6/"
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        nats_cmd.assert_called_with(
            {
                "func": "eventlog",
                "timeout": 180,
                "payload": {
                    "logname": "Security",
                    "days": str(6),
                },
            },
            timeout=182,
        )

        nats_cmd.return_value = "timeout"
        r = self.client.get(url)
        self.assertEqual(r.status_code, 400)

        self.check_not_authenticated("get", url)

    @patch("agents.models.Agent.nats_cmd")
    def test_power_action(self, nats_cmd):
        url = f"/agents/poweraction/"
    def test_reboot_now(self, nats_cmd):
        url = f"/agents/reboot/"

        data = {"pk": self.agent.pk, "action": "rebootnow"}
        data = {"pk": self.agent.pk}
        nats_cmd.return_value = "ok"
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)
@@ -222,30 +242,37 @@ class TestAgentViews(TacticalTestCase):

        self.check_not_authenticated("post", url)

    @patch("agents.models.Agent.salt_api_cmd")
    def test_reboot_later(self, mock_ret):
        url = f"/agents/rebootlater/"
    @patch("agents.models.Agent.nats_cmd")
    def test_reboot_later(self, nats_cmd):
        url = f"/agents/reboot/"

        data = {
            "pk": self.agent.pk,
            "datetime": "2025-08-29 18:41",
        }

        mock_ret.return_value = True
        r = self.client.post(url, data, format="json")
        nats_cmd.return_value = "ok"
        r = self.client.patch(url, data, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")
        self.assertEqual(r.data["agent"], self.agent.hostname)

        mock_ret.return_value = "failed"
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 400)
        nats_data = {
            "func": "schedtask",
            "schedtaskpayload": {
                "type": "schedreboot",
                "trigger": "once",
                "name": r.data["task_name"],
                "year": 2025,
                "month": "August",
                "day": 29,
                "hour": 18,
                "min": 41,
            },
        }
        nats_cmd.assert_called_with(nats_data, timeout=10)

        mock_ret.return_value = "timeout"
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 400)

        mock_ret.return_value = False
        nats_cmd.return_value = "error creating task"
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 400)

@@ -253,12 +280,12 @@ class TestAgentViews(TacticalTestCase):
            "pk": self.agent.pk,
            "datetime": "rm -rf /",
        }
        r = self.client.post(url, data_invalid, format="json")
        r = self.client.patch(url, data_invalid, format="json")

        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.data, "Invalid date")

        self.check_not_authenticated("post", url)
        self.check_not_authenticated("patch", url)

    @patch("os.path.exists")
    @patch("subprocess.run")
@@ -326,7 +353,7 @@ class TestAgentViews(TacticalTestCase):
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)

        data["mode"] = "salt"
        data["mode"] = "mesh"
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 400)
        self.assertIn("pending", r.json())
@@ -346,7 +373,7 @@ class TestAgentViews(TacticalTestCase):

        self.agent.version = "0.9.4"
        self.agent.save(update_fields=["version"])
        data["mode"] = "salt"
        data["mode"] = "mesh"
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 400)
        self.assertIn("0.9.5", r.json())
@@ -428,7 +455,14 @@ class TestAgentViews(TacticalTestCase):
        self.assertIn("&viewmode=13", r.data["file"])
        self.assertIn("&viewmode=12", r.data["terminal"])
        self.assertIn("&viewmode=11", r.data["control"])
        self.assertIn("mstsc.html?login=", r.data["webrdp"])

        self.assertIn("&gotonode=", r.data["file"])
        self.assertIn("&gotonode=", r.data["terminal"])
        self.assertIn("&gotonode=", r.data["control"])

        self.assertIn("?login=", r.data["file"])
        self.assertIn("?login=", r.data["terminal"])
        self.assertIn("?login=", r.data["control"])

        self.assertEqual(self.agent.hostname, r.data["hostname"])
        self.assertEqual(self.agent.client.name, r.data["client"])
@@ -471,42 +505,20 @@ class TestAgentViews(TacticalTestCase):
    def test_overdue_action(self):
        url = "/agents/overdueaction/"

        payload = {"pk": self.agent.pk, "alertType": "email", "action": "enabled"}
        payload = {"pk": self.agent.pk, "overdue_email_alert": True}
        r = self.client.post(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        agent = Agent.objects.get(pk=self.agent.pk)
        self.assertTrue(agent.overdue_email_alert)
        self.assertEqual(self.agent.hostname, r.data)

        payload.update({"alertType": "email", "action": "disabled"})
        r = self.client.post(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        agent = Agent.objects.get(pk=self.agent.pk)
        self.assertFalse(agent.overdue_email_alert)
        self.assertEqual(self.agent.hostname, r.data)

        payload.update({"alertType": "text", "action": "enabled"})
        r = self.client.post(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        agent = Agent.objects.get(pk=self.agent.pk)
        self.assertTrue(agent.overdue_text_alert)
        self.assertEqual(self.agent.hostname, r.data)

        payload.update({"alertType": "text", "action": "disabled"})
        payload = {"pk": self.agent.pk, "overdue_text_alert": False}
        r = self.client.post(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        agent = Agent.objects.get(pk=self.agent.pk)
        self.assertFalse(agent.overdue_text_alert)
        self.assertEqual(self.agent.hostname, r.data)

        payload.update({"alertType": "email", "action": "523423"})
        r = self.client.post(url, payload, format="json")
        self.assertEqual(r.status_code, 400)

        payload.update({"alertType": "text", "action": "asdasd3434asdasd"})
        r = self.client.post(url, payload, format="json")
        self.assertEqual(r.status_code, 400)

        self.check_not_authenticated("post", url)

    def test_list_agents_no_detail(self):
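The rewritten test reflects the new request shape for overdueaction/: the payload now carries the model field names directly and the serializer does the validation, replacing the old alertType/action string pairs. The server-side pattern, as the final hunk of this diff shows:

serializer = AgentOverdueActionSerializer(
    instance=agent, data=request.data, partial=True
)
serializer.is_valid(raise_exception=True)
serializer.save()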
@@ -527,7 +539,7 @@ class TestAgentViews(TacticalTestCase):

        self.check_not_authenticated("get", url)

    @patch("winupdate.tasks.bulk_check_for_updates_task.delay")
    """ @patch("winupdate.tasks.bulk_check_for_updates_task.delay")
    @patch("scripts.tasks.handle_bulk_script_task.delay")
    @patch("scripts.tasks.handle_bulk_command_task.delay")
    @patch("agents.models.Agent.salt_batch_async")
@@ -538,6 +550,7 @@ class TestAgentViews(TacticalTestCase):

        payload = {
            "mode": "command",
            "monType": "all",
            "target": "agents",
            "client": None,
            "site": None,
@@ -555,6 +568,7 @@ class TestAgentViews(TacticalTestCase):

        payload = {
            "mode": "command",
            "monType": "servers",
            "target": "agents",
            "client": None,
            "site": None,
@@ -569,12 +583,11 @@ class TestAgentViews(TacticalTestCase):

        payload = {
            "mode": "command",
            "monType": "workstations",
            "target": "client",
            "client": self.agent.client.id,
            "site": None,
            "agentPKs": [
                self.agent.pk,
            ],
            "agentPKs": [],
            "cmd": "gpupdate /force",
            "timeout": 300,
            "shell": "cmd",
@@ -586,6 +599,7 @@ class TestAgentViews(TacticalTestCase):

        payload = {
            "mode": "command",
            "monType": "all",
            "target": "client",
            "client": self.agent.client.id,
            "site": self.agent.site.id,
@@ -603,6 +617,7 @@ class TestAgentViews(TacticalTestCase):

        payload = {
            "mode": "scan",
            "monType": "all",
            "target": "agents",
            "client": None,
            "site": None,
@@ -616,6 +631,7 @@ class TestAgentViews(TacticalTestCase):

        payload = {
            "mode": "install",
            "monType": "all",
            "target": "client",
            "client": self.agent.client.id,
            "site": None,
@@ -637,7 +653,7 @@ class TestAgentViews(TacticalTestCase):

        # TODO mock the script

        self.check_not_authenticated("post", url)
        self.check_not_authenticated("post", url) """

    @patch("agents.models.Agent.nats_cmd")
    def test_recover_mesh(self, nats_cmd):
@@ -740,131 +756,82 @@ class TestAgentTasks(TacticalTestCase):
        self.setup_coresettings()

    @patch("agents.models.Agent.nats_cmd")
    @patch("agents.models.Agent.salt_api_async", return_value=None)
    def test_get_wmi_detail_task(self, salt_api_async, nats_cmd):
        self.agent_salt = baker.make_recipe("agents.agent", version="1.0.2")
        ret = get_wmi_detail_task.s(self.agent_salt.pk).apply()
        salt_api_async.assert_called_with(timeout=30, func="win_agent.local_sys_info")
        self.assertEqual(ret.status, "SUCCESS")
    def test_agent_update(self, nats_cmd):
        from agents.tasks import agent_update

        self.agent_nats = baker.make_recipe("agents.agent", version="1.1.0")
        ret = get_wmi_detail_task.s(self.agent_nats.pk).apply()
        nats_cmd.assert_called_with({"func": "sysinfo"}, wait=False)
        self.assertEqual(ret.status, "SUCCESS")

    @patch("agents.models.Agent.salt_api_cmd")
    def test_sync_salt_modules_task(self, salt_api_cmd):
        self.agent = baker.make_recipe("agents.agent")
        salt_api_cmd.return_value = {"return": [{f"{self.agent.salt_id}": []}]}
        ret = sync_salt_modules_task.s(self.agent.pk).apply()
        salt_api_cmd.assert_called_with(timeout=35, func="saltutil.sync_modules")
        agent_noarch = baker.make_recipe(
            "agents.agent",
            operating_system="Error getting OS",
            version="1.1.11",
        )
        r = agent_update(agent_noarch.pk)
        self.assertEqual(r, "noarch")
        self.assertEqual(
            ret.result, f"Successfully synced salt modules on {self.agent.hostname}"
        )
        self.assertEqual(ret.status, "SUCCESS")

        salt_api_cmd.return_value = "timeout"
        ret = sync_salt_modules_task.s(self.agent.pk).apply()
        self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")

        salt_api_cmd.return_value = "error"
        ret = sync_salt_modules_task.s(self.agent.pk).apply()
        self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")

    @patch("agents.models.Agent.salt_batch_async", return_value=None)
    @patch("agents.tasks.sleep", return_value=None)
    def test_batch_sync_modules_task(self, mock_sleep, salt_batch_async):
        # chunks of 50, should run 4 times
        baker.make_recipe(
            "agents.online_agent", last_seen=djangotime.now(), _quantity=60
        )
        baker.make_recipe(
            "agents.overdue_agent",
            last_seen=djangotime.now() - djangotime.timedelta(minutes=9),
            _quantity=115,
        )
        ret = batch_sync_modules_task.s().apply()
        self.assertEqual(salt_batch_async.call_count, 4)
        self.assertEqual(ret.status, "SUCCESS")

@patch("agents.models.Agent.nats_cmd")
|
||||
@patch("agents.models.Agent.salt_batch_async", return_value=None)
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_batch_sysinfo_task(self, mock_sleep, salt_batch_async, nats_cmd):
|
||||
|
||||
self.agents_nats = baker.make_recipe(
|
||||
"agents.agent", version="1.1.0", _quantity=20
|
||||
)
|
||||
# test nats
|
||||
ret = batch_sysinfo_task.s().apply()
|
||||
self.assertEqual(nats_cmd.call_count, 20)
|
||||
nats_cmd.assert_called_with({"func": "sysinfo"}, wait=False)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
self.agents_salt = baker.make_recipe(
|
||||
"agents.agent", version="1.0.2", _quantity=70
|
||||
PendingAction.objects.filter(
|
||||
agent=agent_noarch, action_type="agentupdate"
|
||||
).count(),
|
||||
0,
|
||||
)
|
||||
|
||||
minions = [i.salt_id for i in self.agents_salt]
|
||||
|
||||
ret = batch_sysinfo_task.s().apply()
|
||||
self.assertEqual(salt_batch_async.call_count, 1)
|
||||
salt_batch_async.assert_called_with(
|
||||
minions=minions, func="win_agent.local_sys_info"
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
salt_batch_async.reset_mock()
|
||||
[i.delete() for i in self.agents_salt]
|
||||
|
||||
# test old agents, should not run
|
||||
self.agents_old = baker.make_recipe(
|
||||
"agents.agent", version="0.10.2", _quantity=70
|
||||
)
|
||||
ret = batch_sysinfo_task.s().apply()
|
||||
salt_batch_async.assert_not_called()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@patch("agents.models.Agent.salt_api_async", return_value=None)
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_update_salt_minion_task(self, mock_sleep, salt_api_async):
|
||||
# test agents that need salt update
|
||||
self.agents = baker.make_recipe(
|
||||
agent64_111 = baker.make_recipe(
|
||||
"agents.agent",
|
||||
version=settings.LATEST_AGENT_VER,
|
||||
salt_ver="1.0.3",
|
||||
_quantity=53,
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.1.11",
|
||||
)
|
||||
ret = update_salt_minion_task.s().apply()
|
||||
self.assertEqual(salt_api_async.call_count, 53)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
[i.delete() for i in self.agents]
|
||||
salt_api_async.reset_mock()
|
||||
|
||||
# test agents that need salt update but agent version too low
|
||||
self.agents = baker.make_recipe(
|
||||
r = agent_update(agent64_111.pk)
|
||||
self.assertEqual(r, "created")
|
||||
action = PendingAction.objects.get(agent__pk=agent64_111.pk)
|
||||
self.assertEqual(action.action_type, "agentupdate")
|
||||
self.assertEqual(action.status, "pending")
|
||||
self.assertEqual(
|
||||
action.details["url"],
|
||||
"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
|
||||
)
|
||||
self.assertEqual(action.details["inno"], "winagent-v1.3.0.exe")
|
||||
self.assertEqual(action.details["version"], "1.3.0")
|
||||
|
||||
        agent_64_130 = baker.make_recipe(
            "agents.agent",
            version="0.10.2",
            salt_ver="1.0.3",
            _quantity=53,
            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
            version="1.3.0",
        )
        nats_cmd.return_value = "ok"
        r = agent_update(agent_64_130.pk)
        self.assertEqual(r, "created")
        nats_cmd.assert_called_with(
            {
                "func": "agentupdate",
                "payload": {
                    "url": settings.DL_64,
                    "version": settings.LATEST_AGENT_VER,
                    "inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
                },
            },
            wait=False,
        )
        ret = update_salt_minion_task.s().apply()
        self.assertEqual(ret.status, "SUCCESS")
        salt_api_async.assert_not_called()
        [i.delete() for i in self.agents]
        salt_api_async.reset_mock()

        # test agents already on latest salt ver
        self.agents = baker.make_recipe(
        agent64_old = baker.make_recipe(
            "agents.agent",
            version=settings.LATEST_AGENT_VER,
            salt_ver=settings.LATEST_SALT_VER,
            _quantity=53,
            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
            version="1.2.1",
        )
        nats_cmd.return_value = "ok"
        r = agent_update(agent64_old.pk)
        self.assertEqual(r, "created")
        nats_cmd.assert_called_with(
            {
                "func": "agentupdate",
                "payload": {
                    "url": "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
                    "version": "1.3.0",
                    "inno": "winagent-v1.3.0.exe",
                },
            },
            wait=False,
        )
        ret = update_salt_minion_task.s().apply()
        self.assertEqual(ret.status, "SUCCESS")
        salt_api_async.assert_not_called()

    @patch("agents.models.Agent.salt_api_async")
    """ @patch("agents.models.Agent.salt_api_async")
    @patch("agents.tasks.sleep", return_value=None)
    def test_auto_self_agent_update_task(self, mock_sleep, salt_api_async):
        # test 64bit golang agent
@@ -967,4 +934,4 @@ class TestAgentTasks(TacticalTestCase):
                    "url": OLD_32_PY_AGENT,
                },
            )
        self.assertEqual(ret.status, "SUCCESS")
        self.assertEqual(ret.status, "SUCCESS") """

@@ -12,7 +12,6 @@ urlpatterns = [
    path("<pk>/agentdetail/", views.agent_detail),
    path("<int:pk>/meshcentral/", views.meshcentral),
    path("<str:arch>/getmeshexe/", views.get_mesh_exe),
    path("poweraction/", views.power_action),
    path("uninstall/", views.uninstall),
    path("editagent/", views.edit_agent),
    path("<pk>/geteventlog/<logtype>/<days>/", views.get_event_log),
@@ -20,7 +19,7 @@ urlpatterns = [
    path("updateagents/", views.update_agents),
    path("<pk>/getprocs/", views.get_processes),
    path("<pk>/<pid>/killproc/", views.kill_proc),
    path("rebootlater/", views.reboot_later),
    path("reboot/", views.Reboot.as_view()),
    path("installagent/", views.install_agent),
    path("<int:pk>/ping/", views.ping),
    path("recover/", views.recover),
@@ -31,4 +30,5 @@ urlpatterns = [
    path("bulk/", views.bulk),
    path("agent_counts/", views.agent_counts),
    path("maintenance/", views.agent_maintenance),
    path("<int:pk>/wmi/", views.WMI.as_view()),
]
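poweraction/ and rebootlater/ collapse into a single reboot/ route backed by a class-based view; judging from the test changes above, POST reboots immediately and PATCH schedules a reboot. A structural sketch only (the real implementation lives in agents/views.py and is not shown in full in this diff):

from rest_framework.views import APIView

class Reboot(APIView):
    def post(self, request):
        ...  # immediate reboot, replacing the old poweraction/ endpoint

    def patch(self, request):
        ...  # scheduled reboot, replacing the old rebootlater/ endpoint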
@@ -3,8 +3,11 @@ from loguru import logger
import os
import subprocess
import pytz
import random
import string
import datetime as dt
from packaging import version as pyver
from typing import List

from django.conf import settings
from django.shortcuts import get_object_or_404
@@ -18,7 +21,7 @@ from rest_framework import status, generics
from .models import Agent, AgentOutage, RecoveryAction, Note
from core.models import CoreSettings
from scripts.models import Script
from logs.models import AuditLog
from logs.models import AuditLog, PendingAction

from .serializers import (
    AgentSerializer,
@@ -27,11 +30,15 @@ from .serializers import (
    AgentEditSerializer,
    NoteSerializer,
    NotesSerializer,
    AgentOverdueActionSerializer,
)
from winupdate.serializers import WinUpdatePolicySerializer

from .tasks import uninstall_agent_task, send_agent_update_task
from winupdate.tasks import bulk_check_for_updates_task
from .tasks import (
    send_agent_update_task,
    run_script_email_results_task,
)
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task

from tacticalrmm.utils import notify_error, reload_nats
@@ -61,38 +68,32 @@ def update_agents(request):
@api_view()
def ping(request, pk):
    agent = get_object_or_404(Agent, pk=pk)
    if not agent.has_nats:
        return notify_error("Requires agent version 1.1.0 or greater")
    r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=10))
    status = "offline"
    if agent.has_nats:
        r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
        if r == "pong":
            status = "online"

    if r == "timeout" or r == "natsdown":
        return Response({"name": agent.hostname, "status": "offline"})
    elif r == "pong":
        return Response({"name": agent.hostname, "status": "online"})

    return Response({"name": agent.hostname, "status": "offline"})
    return Response({"name": agent.hostname, "status": status})


@api_view(["DELETE"])
def uninstall(request):
    agent = get_object_or_404(Agent, pk=request.data["pk"])
    if not agent.has_nats:
        return notify_error("Requires agent version 1.1.0 or greater")
    if agent.has_nats:
        asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))

    asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))

    salt_id = agent.salt_id
    name = agent.hostname
    agent.delete()
    reload_nats()

    uninstall_agent_task.delay(salt_id)
    return Response(f"{name} will now be uninstalled.")


@api_view(["PATCH"])
def edit_agent(request):
    agent = get_object_or_404(Agent, pk=request.data["id"])

    old_site = agent.site.pk
    a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True)
    a_serializer.is_valid(raise_exception=True)
    a_serializer.save()
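The rewritten ping view degrades gracefully: agents without nats now simply report offline instead of erroring with a 400. Its decision logic, distilled into a pure function as a hedged sketch:

def ping_status(has_nats: bool, reply: str) -> str:
    # distillation of the new view logic above
    return "online" if has_nats and reply == "pong" else "offline"

assert ping_status(True, "pong") == "online"
assert ping_status(True, "timeout") == "offline"
assert ping_status(False, "") == "offline"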
@@ -104,6 +105,11 @@ def edit_agent(request):
    p_serializer.is_valid(raise_exception=True)
    p_serializer.save()

    # check if site changed and initiate generating correct policies
    if old_site != request.data["site"]:
        agent.generate_checks_from_policies()
        agent.generate_tasks_from_policies()

    return Response("ok")


@@ -119,16 +125,9 @@ def meshcentral(request, pk):
    if token == "err":
        return notify_error("Invalid mesh token")

    control = (
        f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=11&hide=31"
    )
    terminal = (
        f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=12&hide=31"
    )
    file = (
        f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=13&hide=31"
    )
    webrdp = f"{core.mesh_site}/mstsc.html?login={token}&node={agent.mesh_node_id}"
    control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
    terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
    file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31"

    AuditLog.audit_mesh_session(username=request.user.username, hostname=agent.hostname)

@@ -137,7 +136,6 @@ def meshcentral(request, pk):
            "control": control,
            "terminal": terminal,
            "file": file,
            "webrdp": webrdp,
            "status": agent.status,
            "client": agent.client.name,
            "site": agent.site.name,
@@ -154,12 +152,12 @@ def agent_detail(request, pk):
@api_view()
def get_processes(request, pk):
    agent = get_object_or_404(Agent, pk=pk)
    if not agent.has_nats:
        return notify_error("Requires agent version 1.1.0 or greater")
    if pyver.parse(agent.version) < pyver.parse("1.2.0"):
        return notify_error("Requires agent version 1.2.0 or greater")

    r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
    if r == "timeout":
        return notify_error("Unable to contact the agent")

    return Response(r)

@@ -186,34 +184,22 @@ def get_event_log(request, pk, logtype, days):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
timeout = 180 if logtype == "Security" else 30
|
||||
data = {
|
||||
"func": "eventlog",
|
||||
"timeout": 30,
|
||||
"timeout": timeout,
|
||||
"payload": {
|
||||
"logname": logtype,
|
||||
"days": str(days),
|
||||
},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=32))
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response(r)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def power_action(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
if request.data["action"] == "rebootnow":
|
||||
r = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def send_raw_cmd(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
@@ -349,47 +335,72 @@ def by_site(request, sitepk):
|
||||
|
||||
@api_view(["POST"])
|
||||
def overdue_action(request):
|
||||
pk = request.data["pk"]
|
||||
alert_type = request.data["alertType"]
|
||||
action = request.data["action"]
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if alert_type == "email" and action == "enabled":
|
||||
agent.overdue_email_alert = True
|
||||
agent.save(update_fields=["overdue_email_alert"])
|
||||
elif alert_type == "email" and action == "disabled":
|
||||
agent.overdue_email_alert = False
|
||||
agent.save(update_fields=["overdue_email_alert"])
|
||||
elif alert_type == "text" and action == "enabled":
|
||||
agent.overdue_text_alert = True
|
||||
agent.save(update_fields=["overdue_text_alert"])
|
||||
elif alert_type == "text" and action == "disabled":
|
||||
agent.overdue_text_alert = False
|
||||
agent.save(update_fields=["overdue_text_alert"])
|
||||
else:
|
||||
return Response(
|
||||
{"error": "Something went wrong"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
serializer = AgentOverdueActionSerializer(
|
||||
instance=agent, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response(agent.hostname)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def reboot_later(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
date_time = request.data["datetime"]
|
||||
class Reboot(APIView):
|
||||
# reboot now
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
try:
|
||||
obj = dt.datetime.strptime(date_time, "%Y-%m-%d %H:%M")
|
||||
except Exception:
|
||||
return notify_error("Invalid date")
|
||||
r = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
r = agent.schedule_reboot(obj)
|
||||
return Response("ok")
|
||||
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "failed":
|
||||
return notify_error("Something went wrong")
|
||||
# reboot later
|
||||
def patch(self, request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_gotasks:
|
||||
return notify_error("Requires agent version 1.1.1 or greater")
|
||||
|
||||
return Response(r["msg"])
|
||||
try:
|
||||
obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%d %H:%M")
|
||||
except Exception:
|
||||
return notify_error("Invalid date")
|
||||
|
||||
task_name = "TacticalRMM_SchedReboot_" + "".join(
|
||||
random.choice(string.ascii_letters) for _ in range(10)
|
||||
)
|
||||
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "schedreboot",
|
||||
"trigger": "once",
|
||||
"name": task_name,
|
||||
"year": int(dt.datetime.strftime(obj, "%Y")),
|
||||
"month": dt.datetime.strftime(obj, "%B"),
|
||||
"day": int(dt.datetime.strftime(obj, "%d")),
|
||||
"hour": int(dt.datetime.strftime(obj, "%H")),
|
||||
"min": int(dt.datetime.strftime(obj, "%M")),
|
||||
},
|
||||
}
|
||||
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.1.2"):
|
||||
nats_data["schedtaskpayload"]["deleteafter"] = True
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
if r != "ok":
|
||||
return notify_error(r)
|
||||
|
||||
details = {"taskname": task_name, "time": str(obj)}
|
||||
PendingAction.objects.create(
|
||||
agent=agent, action_type="schedreboot", details=details
|
||||
)
|
||||
nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
|
||||
return Response(
|
||||
{"time": nice_time, "agent": agent.hostname, "task_name": task_name}
|
||||
)
|
||||
|
||||
|
||||
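# NOTE (sketch, not from this compare): usage of the new Reboot APIView above.
# The old reboot_later() function view becomes Reboot.patch and the "rebootnow"
# power action becomes Reboot.post. The /agents/reboot/ path below is an
# assumption; the actual route lives in urls.py, which this hunk does not show.
#
#   POST  /agents/reboot/  {"pk": 1}                                  -> reboot now
#   PATCH /agents/reboot/  {"pk": 1, "datetime": "2021-01-15 22:30"}  -> schedule reboot
#
# The PATCH response returns {"time": ..., "agent": ..., "task_name": ...} so the
# frontend can confirm the one-shot scheduled task it just created.
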
@api_view(["POST"])
|
||||
@@ -450,7 +461,7 @@ def install_agent(request):
|
||||
f"GOARCH={goarch}",
|
||||
go_bin,
|
||||
"build",
|
||||
f"-ldflags=\"-X 'main.Inno={inno}'",
|
||||
f"-ldflags=\"-s -w -X 'main.Inno={inno}'",
|
||||
f"-X 'main.Api={api}'",
|
||||
f"-X 'main.Client={client_id}'",
|
||||
f"-X 'main.Site={site_id}'",
|
||||
@@ -550,7 +561,7 @@ def install_agent(request):
|
||||
"&&",
|
||||
"timeout",
|
||||
"/t",
|
||||
"20",
|
||||
"10",
|
||||
"/nobreak",
|
||||
">",
|
||||
"NUL",
|
||||
@@ -580,8 +591,6 @@ def install_agent(request):
|
||||
resp = {
|
||||
"cmd": " ".join(str(i) for i in cmd),
|
||||
"url": download_url,
|
||||
"salt64": settings.SALT_64,
|
||||
"salt32": settings.SALT_32,
|
||||
}
|
||||
|
||||
return Response(resp)
|
||||
@@ -642,17 +651,12 @@ def recover(request):
|
||||
return notify_error("Only available in agent version greater than 0.9.5")
|
||||
|
||||
if not agent.has_nats:
|
||||
if mode == "tacagent" or mode == "checkrunner" or mode == "rpc":
|
||||
if mode == "tacagent" or mode == "rpc":
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
# attempt a realtime recovery if supported, otherwise fall back to old recovery method
|
||||
if agent.has_nats:
|
||||
if (
|
||||
mode == "tacagent"
|
||||
or mode == "checkrunner"
|
||||
or mode == "salt"
|
||||
or mode == "mesh"
|
||||
):
|
||||
if mode == "tacagent" or mode == "mesh":
|
||||
data = {"func": "recover", "payload": {"mode": mode}}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=10))
|
||||
if r == "ok":
|
||||
@@ -711,6 +715,21 @@ def run_script(request):
|
||||
if output == "wait":
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=req_timeout))
|
||||
return Response(r)
|
||||
elif output == "email":
|
||||
if not pyver.parse(agent.version) >= pyver.parse("1.1.12"):
|
||||
return notify_error("Requires agent version 1.1.12 or greater")
|
||||
|
||||
emails = (
|
||||
[] if request.data["emailmode"] == "default" else request.data["emails"]
|
||||
)
|
||||
run_script_email_results_task.delay(
|
||||
agentpk=agent.pk,
|
||||
scriptpk=script.pk,
|
||||
nats_timeout=req_timeout,
|
||||
nats_data=data,
|
||||
emails=emails,
|
||||
)
|
||||
return Response(f"{script.name} will now be run on {agent.hostname}")
|
||||
else:
|
||||
asyncio.run(agent.nats_cmd(data, wait=False))
|
||||
return Response(f"{script.name} will now be run on {agent.hostname}")
|
||||
@@ -794,12 +813,16 @@ def bulk(request):
|
||||
elif request.data["target"] == "agents":
|
||||
q = Agent.objects.filter(pk__in=request.data["agentPKs"])
|
||||
elif request.data["target"] == "all":
|
||||
q = Agent.objects.all()
|
||||
q = Agent.objects.only("pk", "monitoring_type")
|
||||
else:
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
minions = [agent.salt_id for agent in q]
|
||||
agents = [agent.pk for agent in q]
|
||||
if request.data["monType"] == "servers":
|
||||
q = q.filter(monitoring_type="server")
|
||||
elif request.data["monType"] == "workstations":
|
||||
q = q.filter(monitoring_type="workstation")
|
||||
|
||||
agents: List[int] = [agent.pk for agent in q]
|
||||
|
||||
AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
|
||||
|
||||
@@ -817,14 +840,12 @@ def bulk(request):
|
||||
return Response(f"{script.name} will now be run on {len(agents)} agents")
|
||||
|
||||
elif request.data["mode"] == "install":
|
||||
r = Agent.salt_batch_async(minions=minions, func="win_agent.install_updates")
|
||||
if r == "timeout":
|
||||
return notify_error("Salt API not running")
|
||||
bulk_install_updates_task.delay(agents)
|
||||
return Response(
|
||||
f"Pending updates will now be installed on {len(agents)} agents"
|
||||
)
|
||||
elif request.data["mode"] == "scan":
|
||||
bulk_check_for_updates_task.delay(minions=minions)
|
||||
bulk_check_for_updates_task.delay(agents)
|
||||
return Response(f"Patch status scan will now run on {len(agents)} agents")
|
||||
|
||||
return notify_error("Something went wrong")
|
||||
@@ -871,3 +892,15 @@ def agent_maintenance(request):
|
||||
return notify_error("Invalid data")
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class WMI(APIView):
|
||||
def get(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if pyver.parse(agent.version) < pyver.parse("1.1.2"):
|
||||
return notify_error("Requires agent version 1.1.2 or greater")
|
||||
|
||||
r = asyncio.run(agent.nats_cmd({"func": "sysinfo"}, timeout=20))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
return Response("ok")
|
||||
|
||||
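# NOTE (sketch, not from this compare): nearly every view above follows the same
# asyncio.run(agent.nats_cmd({...}, timeout=N)) request/reply pattern. nats_cmd
# is defined on the Agent model and is not shown in this diff; a minimal sketch
# of the idea, assuming nats-py and msgpack framing (the real implementation
# may differ):

import asyncio

import msgpack
import nats


async def nats_cmd_sketch(agent_id: str, data: dict, timeout: int = 10):
    # one request/reply round trip: the agent service subscribes to its own id
    nc = await nats.connect("nats://localhost:4222")
    try:
        msg = await nc.request(agent_id, msgpack.dumps(data), timeout=timeout)
        return msgpack.loads(msg.data)
    except Exception:
        # error handling collapsed for brevity; the views above treat the
        # string "timeout" as "agent unreachable"
        return "timeout"
    finally:
        await nc.close()
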
@@ -7,19 +7,25 @@ import django.db.models.deletion
class Migration(migrations.Migration):

    dependencies = [
        ('checks', '0010_auto_20200922_1344'),
        ('alerts', '0002_auto_20200815_1618'),
        ("checks", "0010_auto_20200922_1344"),
        ("alerts", "0002_auto_20200815_1618"),
    ]

    operations = [
        migrations.AddField(
            model_name='alert',
            name='assigned_check',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='checks.check'),
            model_name="alert",
            name="assigned_check",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="alert",
                to="checks.check",
            ),
        ),
        migrations.AlterField(
            model_name='alert',
            name='alert_time',
            model_name="alert",
            name="alert_time",
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
    ]

@@ -37,7 +37,7 @@ class Alert(models.Model):
    @classmethod
    def create_availability_alert(cls, agent):
        pass


    @classmethod
    def create_check_alert(cls, check):
        pass
        pass

@@ -16,4 +16,4 @@ class AlertSerializer(ModelSerializer):

    class Meta:
        model = Alert
        fields = "__all__"
        fields = "__all__"

@@ -1,5 +0,0 @@
from django.apps import AppConfig


class ApiConfig(AppConfig):
    name = "api"
@@ -1,11 +0,0 @@
from django.urls import path
from . import views
from apiv3 import views as v3_views

urlpatterns = [
    path("triggerpatchscan/", views.trigger_patch_scan),
    path("<int:pk>/checkrunner/", views.CheckRunner.as_view()),
    path("<int:pk>/taskrunner/", views.TaskRunner.as_view()),
    path("<int:pk>/saltinfo/", views.SaltInfo.as_view()),
    path("<int:pk>/meshinfo/", v3_views.MeshInfo.as_view()),
]
@@ -1,149 +0,0 @@
from loguru import logger

from django.conf import settings
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime

from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import (
    api_view,
    authentication_classes,
    permission_classes,
)

from agents.models import Agent
from checks.models import Check
from autotasks.models import AutomatedTask

from winupdate.tasks import check_for_updates_task

from autotasks.serializers import TaskRunnerGetSerializer, TaskRunnerPatchSerializer
from checks.serializers import CheckRunnerGetSerializer, CheckResultsSerializer


logger.configure(**settings.LOG_CONFIG)


@api_view(["PATCH"])
@authentication_classes((TokenAuthentication,))
@permission_classes((IsAuthenticated,))
def trigger_patch_scan(request):
    agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
    reboot_policy = agent.get_patch_policy().reboot_after_install
    reboot = False

    if reboot_policy == "always":
        reboot = True

    if request.data["reboot"]:
        if reboot_policy == "required":
            reboot = True
        elif reboot_policy == "never":
            agent.needs_reboot = True
            agent.save(update_fields=["needs_reboot"])

    if reboot:
        r = agent.salt_api_cmd(
            timeout=15,
            func="system.reboot",
            arg=7,
            kwargs={"in_seconds": True},
        )

        if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
            check_for_updates_task.apply_async(
                queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
            )
        else:
            logger.info(f"{agent.hostname} is rebooting after updates were installed.")
    else:
        check_for_updates_task.apply_async(
            queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
        )

    return Response("ok")


class CheckRunner(APIView):
    """
    For windows agent
    """

    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, pk):
        agent = get_object_or_404(Agent, pk=pk)
        checks = Check.objects.filter(agent__pk=pk, overriden_by_policy=False)

        ret = {
            "agent": agent.pk,
            "check_interval": agent.check_interval,
            "checks": CheckRunnerGetSerializer(checks, many=True).data,
        }
        return Response(ret)

    def patch(self, request, pk):
        check = get_object_or_404(Check, pk=pk)

        if check.check_type != "cpuload" and check.check_type != "memory":
            serializer = CheckResultsSerializer(
                instance=check, data=request.data, partial=True
            )
            serializer.is_valid(raise_exception=True)
            serializer.save(last_run=djangotime.now())

        else:
            check.last_run = djangotime.now()
            check.save(update_fields=["last_run"])

        check.handle_check(request.data)

        return Response("ok")


class TaskRunner(APIView):
    """
    For the windows python agent
    """

    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, pk):

        task = get_object_or_404(AutomatedTask, pk=pk)
        return Response(TaskRunnerGetSerializer(task).data)

    def patch(self, request, pk):
        task = get_object_or_404(AutomatedTask, pk=pk)

        serializer = TaskRunnerPatchSerializer(
            instance=task, data=request.data, partial=True
        )
        serializer.is_valid(raise_exception=True)
        serializer.save(last_run=djangotime.now())
        return Response("ok")


class SaltInfo(APIView):
    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, pk):
        agent = get_object_or_404(Agent, pk=pk)
        ret = {
            "latestVer": settings.LATEST_SALT_VER,
            "currentVer": agent.salt_ver,
            "salt_id": agent.salt_id,
        }
        return Response(ret)

    def patch(self, request, pk):
        agent = get_object_or_404(Agent, pk=pk)
        agent.salt_ver = request.data["ver"]
        agent.save(update_fields=["salt_ver"])
        return Response("ok")
@@ -1,5 +0,0 @@
from django.apps import AppConfig


class Apiv2Config(AppConfig):
    name = 'apiv2'
@@ -1,38 +0,0 @@
from tacticalrmm.test import TacticalTestCase
from unittest.mock import patch
from model_bakery import baker
from itertools import cycle


class TestAPIv2(TacticalTestCase):
    def setUp(self):
        self.authenticate()
        self.setup_coresettings()

    @patch("agents.models.Agent.salt_api_cmd")
    def test_sync_modules(self, mock_ret):
        # setup data
        agent = baker.make_recipe("agents.agent")
        url = "/api/v2/saltminion/"
        payload = {"agent_id": agent.agent_id}

        mock_ret.return_value = "error"
        r = self.client.patch(url, payload, format="json")
        self.assertEqual(r.status_code, 400)

        mock_ret.return_value = []
        r = self.client.patch(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.data, "Modules are already in sync")

        mock_ret.return_value = ["modules.win_agent"]
        r = self.client.patch(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.data, "Successfully synced salt modules")

        mock_ret.return_value = ["askdjaskdjasd", "modules.win_agent"]
        r = self.client.patch(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.data, "Successfully synced salt modules")

        self.check_not_authenticated("patch", url)
@@ -1,14 +0,0 @@
from django.urls import path
from . import views
from apiv3 import views as v3_views

urlpatterns = [
    path("newagent/", v3_views.NewAgent.as_view()),
    path("meshexe/", v3_views.MeshExe.as_view()),
    path("saltminion/", v3_views.SaltMinion.as_view()),
    path("<str:agentid>/saltminion/", v3_views.SaltMinion.as_view()),
    path("sysinfo/", v3_views.SysInfo.as_view()),
    path("hello/", v3_views.Hello.as_view()),
    path("checkrunner/", views.CheckRunner.as_view()),
    path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
]
@@ -1,41 +0,0 @@
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime

from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

from agents.models import Agent
from checks.models import Check

from checks.serializers import CheckRunnerGetSerializerV2


class CheckRunner(APIView):
    """
    For the windows python agent
    """

    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, agentid):
        agent = get_object_or_404(Agent, agent_id=agentid)
        agent.last_seen = djangotime.now()
        agent.save(update_fields=["last_seen"])
        checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)

        ret = {
            "agent": agent.pk,
            "check_interval": agent.check_interval,
            "checks": CheckRunnerGetSerializerV2(checks, many=True).data,
        }
        return Response(ret)

    def patch(self, request):
        check = get_object_or_404(Check, pk=request.data["id"])
        check.last_run = djangotime.now()
        check.save(update_fields=["last_run"])
        status = check.handle_checkv2(request.data)
        return Response(status)
@@ -26,34 +26,13 @@ class TestAPIv3(TacticalTestCase):

        self.check_not_authenticated("get", url)

    def test_get_salt_minion(self):
        url = f"/api/v3/{self.agent.agent_id}/saltminion/"
        url2 = f"/api/v2/{self.agent.agent_id}/saltminion/"

        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertIn("latestVer", r.json().keys())
        self.assertIn("currentVer", r.json().keys())
        self.assertIn("salt_id", r.json().keys())
        self.assertIn("downloadURL", r.json().keys())

        r2 = self.client.get(url2)
        self.assertEqual(r2.status_code, 200)

        self.check_not_authenticated("get", url)
        self.check_not_authenticated("get", url2)

    def test_get_mesh_info(self):
        url = f"/api/v3/{self.agent.pk}/meshinfo/"
        url2 = f"/api/v1/{self.agent.pk}/meshinfo/"

        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        r = self.client.get(url2)
        self.assertEqual(r.status_code, 200)

        self.check_not_authenticated("get", url)
        self.check_not_authenticated("get", url2)

    def test_get_winupdater(self):
        url = f"/api/v3/{self.agent.agent_id}/winupdater/"
@@ -65,7 +44,7 @@ class TestAPIv3(TacticalTestCase):
    def test_sysinfo(self):
        # TODO replace this with golang wmi sample data

        url = f"/api/v3/sysinfo/"
        url = "/api/v3/sysinfo/"
        with open(
            os.path.join(
                settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json"
@@ -81,7 +60,7 @@ class TestAPIv3(TacticalTestCase):
        self.check_not_authenticated("patch", url)

    def test_hello_patch(self):
        url = f"/api/v3/hello/"
        url = "/api/v3/hello/"
        payload = {
            "agent_id": self.agent.agent_id,
            "logged_in_username": "None",
@@ -96,3 +75,12 @@ class TestAPIv3(TacticalTestCase):
        self.assertEqual(r.status_code, 200)

        self.check_not_authenticated("patch", url)

    def test_checkrunner_interval(self):
        url = f"/api/v3/{self.agent.agent_id}/checkinterval/"
        r = self.client.get(url, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(
            r.json(),
            {"agent": self.agent.pk, "check_interval": self.agent.check_interval},
        )

@@ -2,16 +2,18 @@ from django.urls import path
from . import views

urlpatterns = [
    path("checkin/", views.CheckIn.as_view()),
    path("hello/", views.Hello.as_view()),
    path("checkrunner/", views.CheckRunner.as_view()),
    path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
    path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()),
    path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
    path("saltminion/", views.SaltMinion.as_view()),
    path("<str:agentid>/saltminion/", views.SaltMinion.as_view()),
    path("<int:pk>/meshinfo/", views.MeshInfo.as_view()),
    path("meshexe/", views.MeshExe.as_view()),
    path("sysinfo/", views.SysInfo.as_view()),
    path("newagent/", views.NewAgent.as_view()),
    path("winupdater/", views.WinUpdater.as_view()),
    path("<str:agentid>/winupdater/", views.WinUpdater.as_view()),
    path("software/", views.Software.as_view()),
    path("installer/", views.Installer.as_view()),
]

@@ -2,12 +2,12 @@ import asyncio
import os
import requests
from loguru import logger
from packaging import version as pyver

from django.conf import settings
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from django.http import HttpResponse
from rest_framework import serializers

from rest_framework.response import Response
from rest_framework.views import APIView
@@ -20,7 +20,8 @@ from checks.models import Check
from autotasks.models import AutomatedTask
from accounts.models import User
from winupdate.models import WinUpdatePolicy
from checks.serializers import CheckRunnerGetSerializerV3
from software.models import InstalledSoftware
from checks.serializers import CheckRunnerGetSerializer
from agents.serializers import WinAgentSerializer
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
from winupdate.serializers import ApprovedUpdateSerializer
@@ -28,18 +29,110 @@ from winupdate.serializers import ApprovedUpdateSerializer
from agents.tasks import (
    agent_recovery_email_task,
    agent_recovery_sms_task,
    get_wmi_detail_task,
    sync_salt_modules_task,
)
from winupdate.tasks import check_for_updates_task
from software.tasks import get_installed_software, install_chocolatey
from checks.utils import bytes2human
from tacticalrmm.utils import notify_error, reload_nats
from tacticalrmm.utils import notify_error, reload_nats, filter_software, SoftwareList

logger.configure(**settings.LOG_CONFIG)


class CheckIn(APIView):
    """
    The agent's checkin endpoint
    patch: called every 45 to 110 seconds, handles agent updates and recovery
    put: called every 5 to 10 minutes, handles basic system info
    post: called once on windows service startup
    """

    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def patch(self, request):
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
        agent.version = request.data["version"]
        agent.last_seen = djangotime.now()
        agent.save(update_fields=["version", "last_seen"])

        if agent.agentoutages.exists() and agent.agentoutages.last().is_active:
            last_outage = agent.agentoutages.last()
            last_outage.recovery_time = djangotime.now()
            last_outage.save(update_fields=["recovery_time"])

            if agent.overdue_email_alert:
                agent_recovery_email_task.delay(pk=last_outage.pk)
            if agent.overdue_text_alert:
                agent_recovery_sms_task.delay(pk=last_outage.pk)

        recovery = agent.recoveryactions.filter(last_run=None).last()
        if recovery is not None:
            recovery.last_run = djangotime.now()
            recovery.save(update_fields=["last_run"])
            return Response(recovery.send())

        # handle agent update
        if agent.pendingactions.filter(
            action_type="agentupdate", status="pending"
        ).exists():
            update = agent.pendingactions.filter(
                action_type="agentupdate", status="pending"
            ).last()
            update.status = "completed"
            update.save(update_fields=["status"])
            return Response(update.details)

        # get any pending actions
        if agent.pendingactions.filter(status="pending").exists():
            agent.handle_pending_actions()

        return Response("ok")

    def put(self, request):
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
        serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
        serializer.is_valid(raise_exception=True)

        if "disks" in request.data.keys():
            disks = request.data["disks"]
            new = []
            # python agent
            if isinstance(disks, dict):
                for k, v in disks.items():
                    new.append(v)
            else:
                # golang agent
                for disk in disks:
                    tmp = {}
                    for k, v in disk.items():
                        tmp["device"] = disk["device"]
                        tmp["fstype"] = disk["fstype"]
                        tmp["total"] = bytes2human(disk["total"])
                        tmp["used"] = bytes2human(disk["used"])
                        tmp["free"] = bytes2human(disk["free"])
                        tmp["percent"] = int(disk["percent"])
                    new.append(tmp)

            serializer.save(disks=new)
            return Response("ok")

        if "logged_in_username" in request.data.keys():
            if request.data["logged_in_username"] != "None":
                serializer.save(last_logged_in_user=request.data["logged_in_username"])
                return Response("ok")

        serializer.save()
        return Response("ok")

    def post(self, request):
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])

        serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
        serializer.is_valid(raise_exception=True)
        serializer.save(last_seen=djangotime.now())
        return Response("ok")

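# NOTE (sketch, not from this compare): the golang-agent disk branch in
# CheckIn.put above converts raw byte counts with bytes2human, imported from
# checks.utils but not shown here. A minimal sketch, assuming the classic
# psutil recipe:

def bytes2human_sketch(n: int) -> str:
    symbols = ("K", "M", "G", "T", "P", "E")
    prefix = {s: 1 << (i + 1) * 10 for i, s in enumerate(symbols)}
    for s in reversed(symbols):
        if n >= prefix[s]:
            return "%.1f%sB" % (float(n) / prefix[s], s)
    return "%sB" % n

# e.g. bytes2human_sketch(107374182400) -> "100.0GB"
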
class Hello(APIView):
    #### DEPRECATED, for agents <= 1.1.9 ####
    """
    The agent's checkin endpoint
    patch: called every 30 to 120 seconds
@@ -121,17 +214,6 @@ class Hello(APIView):
        serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
        serializer.is_valid(raise_exception=True)
        serializer.save(last_seen=djangotime.now())

        sync_salt_modules_task.delay(agent.pk)
        get_installed_software.delay(agent.pk)
        get_wmi_detail_task.delay(agent.pk)
        check_for_updates_task.apply_async(
            queue="wupdate", kwargs={"pk": agent.pk, "wait": True}
        )

        if not agent.choco_installed:
            install_chocolatey.delay(agent.pk, wait=True)

        return Response("ok")


@@ -150,31 +232,28 @@ class CheckRunner(APIView):
        ret = {
            "agent": agent.pk,
            "check_interval": agent.check_interval,
            "checks": CheckRunnerGetSerializerV3(checks, many=True).data,
            "checks": CheckRunnerGetSerializer(checks, many=True).data,
        }
        return Response(ret)

    def patch(self, request):
        from logs.models import AuditLog

        check = get_object_or_404(Check, pk=request.data["id"])
        check.last_run = djangotime.now()
        check.save(update_fields=["last_run"])
        status = check.handle_checkv2(request.data)

        # create audit entry
        AuditLog.objects.create(
            username=check.agent.hostname,
            agent=check.agent.hostname,
            object_type="agent",
            action="check_run",
            message=f"{check.readable_desc} was run on {check.agent.hostname}. Status: {status}",
            after_value=Check.serialize(check),
        )

        return Response(status)


class CheckRunnerInterval(APIView):
    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, agentid):
        agent = get_object_or_404(Agent, agent_id=agentid)
        return Response({"agent": agent.pk, "check_interval": agent.check_interval})


class TaskRunner(APIView):
    """
    For the windows golang agent
@@ -213,77 +292,6 @@ class TaskRunner(APIView):
        return Response("ok")


class SaltMinion(APIView):
    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, agentid):
        agent = get_object_or_404(Agent, agent_id=agentid)
        ret = {
            "latestVer": settings.LATEST_SALT_VER,
            "currentVer": agent.salt_ver,
            "salt_id": agent.salt_id,
            "downloadURL": agent.winsalt_dl,
        }
        return Response(ret)

    def post(self, request):
        # accept the salt key
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
        if agent.salt_id != request.data["saltid"]:
            return notify_error("Salt keys do not match")

        try:
            resp = requests.post(
                f"http://{settings.SALT_HOST}:8123/run",
                json=[
                    {
                        "client": "wheel",
                        "fun": "key.accept",
                        "match": request.data["saltid"],
                        "username": settings.SALT_USERNAME,
                        "password": settings.SALT_PASSWORD,
                        "eauth": "pam",
                    }
                ],
                timeout=30,
            )
        except Exception:
            return notify_error("No communication between agent and salt-api")

        try:
            data = resp.json()["return"][0]["data"]
            minion = data["return"]["minions"][0]
        except Exception:
            return notify_error("Key error")

        if data["success"] and minion == request.data["saltid"]:
            return Response("Salt key was accepted")
        else:
            return notify_error("Not accepted")

    def patch(self, request):
        # sync modules
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
        r = agent.salt_api_cmd(timeout=45, func="saltutil.sync_modules")

        if r == "timeout" or r == "error":
            return notify_error("Failed to sync salt modules")

        if isinstance(r, list) and any("modules" in i for i in r):
            return Response("Successfully synced salt modules")
        elif isinstance(r, list) and not r:
            return Response("Modules are already in sync")
        else:
            return notify_error(f"Failed to sync salt modules: {str(r)}")

    def put(self, request):
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
        agent.salt_ver = request.data["ver"]
        agent.save(update_fields=["salt_ver"])
        return Response("ok")


class WinUpdater(APIView):

    authentication_classes = [TokenAuthentication]
@@ -324,6 +332,7 @@ class WinUpdater(APIView):
        update.installed = True
        update.save(update_fields=["result", "downloaded", "installed"])

        agent.delete_superseded_updates()
        return Response("ok")

    # agent calls this after it's finished installing all patches
@@ -345,19 +354,11 @@ class WinUpdater(APIView):
        if reboot:
            if agent.has_nats:
                asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
            else:
                agent.salt_api_async(
                    func="system.reboot",
                    arg=7,
                    kwargs={"in_seconds": True},
            logger.info(
                f"{agent.hostname} is rebooting after updates were installed."
            )

            logger.info(f"{agent.hostname} is rebooting after updates were installed.")
        else:
            check_for_updates_task.apply_async(
                queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
            )

        agent.delete_superseded_updates()
        return Response("ok")


@@ -386,7 +387,15 @@ class MeshInfo(APIView):

    def patch(self, request, pk):
        agent = get_object_or_404(Agent, pk=pk)
        agent.mesh_node_id = request.data["nodeidhex"]

        if "nodeidhex" in request.data:
            # agent <= 1.1.0
            nodeid = request.data["nodeidhex"]
        else:
            # agent >= 1.1.1
            nodeid = request.data["nodeid"]

        agent.mesh_node_id = nodeid
        agent.save(update_fields=["mesh_node_id"])
        return Response("ok")

@@ -476,3 +485,42 @@ class NewAgent(APIView):
                "token": token.key,
            }
        )


class Software(APIView):
    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def post(self, request):
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
        raw: SoftwareList = request.data["software"]
        if not isinstance(raw, list):
            return notify_error("err")

        sw = filter_software(raw)
        if not InstalledSoftware.objects.filter(agent=agent).exists():
            InstalledSoftware(agent=agent, software=sw).save()
        else:
            s = agent.installedsoftware_set.first()
            s.software = sw
            s.save(update_fields=["software"])

        return Response("ok")


class Installer(APIView):
    def get(self, request):
        # used to check if token is valid. will return 401 if not
        return Response("ok")

    def post(self, request):
        if "version" not in request.data:
            return notify_error("Invalid data")

        ver = request.data["version"]
        if pyver.parse(ver) < pyver.parse(settings.LATEST_AGENT_VER):
            return notify_error(
                f"Old installer detected (version {ver} ). Latest version is {settings.LATEST_AGENT_VER} Please generate a new installer from the RMM"
            )

        return Response("ok")

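# NOTE (hypothetical sketch, not from this compare): the Software view above
# validates the agent payload against SoftwareList and normalizes it with
# filter_software, both imported from tacticalrmm.utils but not shown in this
# diff. The shape and key names below are assumptions for illustration only:

from typing import Any, Dict, List

SoftwareListSketch = List[Dict[str, Any]]


def filter_software_sketch(sw: SoftwareListSketch) -> SoftwareListSketch:
    # keep a fixed set of keys per entry and drop nameless entries
    keys = ("name", "version", "publisher", "install_date", "size")
    return [{k: entry.get(k, "") for k in keys} for entry in sw if entry.get("name")]
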
@@ -6,11 +6,11 @@ from django.db import migrations
class Migration(migrations.Migration):

    dependencies = [
        ('automation', '0005_auto_20200922_1344'),
        ("automation", "0005_auto_20200922_1344"),
    ]

    operations = [
        migrations.DeleteModel(
            name='PolicyExclusions',
            name="PolicyExclusions",
        ),
    ]

@@ -1,6 +1,5 @@
from django.db import models
from agents.models import Agent
from clients.models import Site, Client
from core.models import CoreSettings
from logs.models import BaseAuditModel

@@ -58,6 +57,11 @@ class Policy(BaseAuditModel):

    @staticmethod
    def cascade_policy_tasks(agent):
        from autotasks.tasks import delete_win_task_schedule

        from autotasks.models import AutomatedTask
        from logs.models import PendingAction

        # List of all tasks to be applied
        tasks = list()
        added_task_pks = list()
@@ -80,7 +84,7 @@ class Policy(BaseAuditModel):
            default_policy = CoreSettings.objects.first().server_policy
            client_policy = client.server_policy
            site_policy = site.server_policy
        else:
        elif agent.monitoring_type == "workstation":
            default_policy = CoreSettings.objects.first().workstation_policy
            client_policy = client.workstation_policy
            site_policy = site.workstation_policy
@@ -107,6 +111,33 @@ class Policy(BaseAuditModel):
                tasks.append(task)
                added_task_pks.append(task.pk)

        # remove policy tasks from agent not included in policy
        for task in agent.autotasks.filter(
            parent_task__in=[
                taskpk
                for taskpk in agent_tasks_parent_pks
                if taskpk not in added_task_pks
            ]
        ):
            delete_win_task_schedule.delay(task.pk)

        # handle matching tasks that haven't synced to agent yet or pending deletion due to agent being offline
        for action in agent.pendingactions.exclude(status="completed"):
            task = AutomatedTask.objects.get(pk=action.details["task_id"])
            if (
                task.parent_task in agent_tasks_parent_pks
                and task.parent_task in added_task_pks
            ):
                agent.remove_matching_pending_task_actions(task.id)

                PendingAction(
                    agent=agent,
                    action_type="taskaction",
                    details={"action": "taskcreate", "task_id": task.id},
                ).save()
                task.sync_status = "notsynced"
                task.save(update_fields=["sync_status"])

        return [task for task in tasks if task.pk not in agent_tasks_parent_pks]

    @staticmethod
@@ -132,7 +163,7 @@ class Policy(BaseAuditModel):
            default_policy = CoreSettings.objects.first().server_policy
            client_policy = client.server_policy
            site_policy = site.server_policy
        else:
        elif agent.monitoring_type == "workstation":
            default_policy = CoreSettings.objects.first().workstation_policy
            client_policy = client.workstation_policy
            site_policy = site.workstation_policy
@@ -280,6 +311,15 @@ class Policy(BaseAuditModel):
            + eventlog_checks
        )

        # remove policy checks from agent that fell out of policy scope
        agent.agentchecks.filter(
            parent_check__in=[
                checkpk
                for checkpk in agent_checks_parent_pks
                if checkpk not in [check.pk for check in final_list]
            ]
        ).delete()

        return [
            check for check in final_list if check.pk not in agent_checks_parent_pks
        ]

@@ -6,46 +6,46 @@ from tacticalrmm.celery import app


@app.task
def generate_agent_checks_from_policies_task(
    ###
    # copies the policy checks to all affected agents
    #
    # clear: clears all policy checks first
    # create_tasks: also create tasks after checks are generated
    ###
    policypk,
    clear=False,
    create_tasks=False,
):
def generate_agent_checks_from_policies_task(policypk, create_tasks=False):

    policy = Policy.objects.get(pk=policypk)
    for agent in policy.related_agents():
        agent.generate_checks_from_policies(clear=clear)

    if policy.is_default_server_policy and policy.is_default_workstation_policy:
        agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
    elif policy.is_default_server_policy:
        agents = Agent.objects.filter(monitoring_type="server").only(
            "pk", "monitoring_type"
        )
    elif policy.is_default_workstation_policy:
        agents = Agent.objects.filter(monitoring_type="workstation").only(
            "pk", "monitoring_type"
        )
    else:
        agents = policy.related_agents()

    for agent in agents:
        agent.generate_checks_from_policies()
        if create_tasks:
            agent.generate_tasks_from_policies(
                clear=clear,
            )
            agent.generate_tasks_from_policies()


@app.task
def generate_agent_checks_by_location_task(
    location, mon_type, clear=False, create_tasks=False
):
def generate_agent_checks_by_location_task(location, mon_type, create_tasks=False):

    for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
        agent.generate_checks_from_policies(clear=clear)
        agent.generate_checks_from_policies()

        if create_tasks:
            agent.generate_tasks_from_policies(clear=clear)
            agent.generate_tasks_from_policies()


@app.task
def generate_all_agent_checks_task(mon_type, clear=False, create_tasks=False):
def generate_all_agent_checks_task(mon_type, create_tasks=False):
    for agent in Agent.objects.filter(monitoring_type=mon_type):
        agent.generate_checks_from_policies(clear=clear)
        agent.generate_checks_from_policies()

        if create_tasks:
            agent.generate_tasks_from_policies(clear=clear)
            agent.generate_tasks_from_policies()


@app.task
@@ -83,18 +83,32 @@ def update_policy_check_fields_task(checkpk):


@app.task
def generate_agent_tasks_from_policies_task(policypk, clear=False):
def generate_agent_tasks_from_policies_task(policypk):

    policy = Policy.objects.get(pk=policypk)
    for agent in policy.related_agents():
        agent.generate_tasks_from_policies(clear=clear)

    if policy.is_default_server_policy and policy.is_default_workstation_policy:
        agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
    elif policy.is_default_server_policy:
        agents = Agent.objects.filter(monitoring_type="server").only(
            "pk", "monitoring_type"
        )
    elif policy.is_default_workstation_policy:
        agents = Agent.objects.filter(monitoring_type="workstation").only(
            "pk", "monitoring_type"
        )
    else:
        agents = policy.related_agents()

    for agent in agents:
        agent.generate_tasks_from_policies()


@app.task
def generate_agent_tasks_by_location_task(location, mon_type, clear=False):
def generate_agent_tasks_by_location_task(location, mon_type):

    for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
        agent.generate_tasks_from_policies(clear=clear)
        agent.generate_tasks_from_policies()


@app.task

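# NOTE: with the clear kwarg removed from the tasks above, call sites queue
# them as follows (this mirrors the updated tests and views elsewhere in this
# compare; nothing here is new API):
#
#   generate_agent_checks_from_policies_task.delay(policypk=policy.pk, create_tasks=True)
#   generate_agent_tasks_from_policies_task.delay(policypk=policy.pk)
#   generate_all_agent_checks_task("workstation", create_tasks=True)
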
@@ -121,9 +121,7 @@ class TestPolicyViews(TacticalTestCase):

        resp = self.client.put(url, data, format="json")
        self.assertEqual(resp.status_code, 200)
        mock_checks_task.assert_called_with(
            policypk=policy.pk, clear=True, create_tasks=True
        )
        mock_checks_task.assert_called_with(policypk=policy.pk, create_tasks=True)

        self.check_not_authenticated("put", url)

@@ -140,8 +138,8 @@ class TestPolicyViews(TacticalTestCase):
        resp = self.client.delete(url, format="json")
        self.assertEqual(resp.status_code, 200)

        mock_checks_task.assert_called_with(policypk=policy.pk, clear=True)
        mock_tasks_task.assert_called_with(policypk=policy.pk, clear=True)
        mock_checks_task.assert_called_with(policypk=policy.pk)
        mock_tasks_task.assert_called_with(policypk=policy.pk)

        self.check_not_authenticated("delete", url)

@@ -298,7 +296,6 @@ class TestPolicyViews(TacticalTestCase):
        mock_checks_location_task.assert_called_with(
            location={"site__client_id": client.id},
            mon_type="server",
            clear=True,
            create_tasks=True,
        )
        mock_checks_location_task.reset_mock()
@@ -311,7 +308,6 @@ class TestPolicyViews(TacticalTestCase):
        mock_checks_location_task.assert_called_with(
            location={"site__client_id": client.id},
            mon_type="workstation",
            clear=True,
            create_tasks=True,
        )
        mock_checks_location_task.reset_mock()
@@ -324,7 +320,6 @@ class TestPolicyViews(TacticalTestCase):
        mock_checks_location_task.assert_called_with(
            location={"site_id": site.id},
            mon_type="server",
            clear=True,
            create_tasks=True,
        )
        mock_checks_location_task.reset_mock()
@@ -337,7 +332,6 @@ class TestPolicyViews(TacticalTestCase):
        mock_checks_location_task.assert_called_with(
            location={"site_id": site.id},
            mon_type="workstation",
            clear=True,
            create_tasks=True,
        )
        mock_checks_location_task.reset_mock()
@@ -347,7 +341,7 @@ class TestPolicyViews(TacticalTestCase):
        self.assertEqual(resp.status_code, 200)

        # called because the relation changed
        mock_checks_task.assert_called_with(clear=True)
        mock_checks_task.assert_called()
        mock_checks_task.reset_mock()

        # Adding the same relations shouldn't trigger mocks
@@ -396,7 +390,6 @@ class TestPolicyViews(TacticalTestCase):
        mock_checks_location_task.assert_called_with(
            location={"site__client_id": client.id},
            mon_type="server",
            clear=True,
            create_tasks=True,
        )
        mock_checks_location_task.reset_mock()
@@ -409,7 +402,6 @@ class TestPolicyViews(TacticalTestCase):
        mock_checks_location_task.assert_called_with(
            location={"site__client_id": client.id},
            mon_type="workstation",
            clear=True,
            create_tasks=True,
        )
        mock_checks_location_task.reset_mock()
@@ -422,7 +414,6 @@ class TestPolicyViews(TacticalTestCase):
        mock_checks_location_task.assert_called_with(
            location={"site_id": site.id},
            mon_type="server",
            clear=True,
            create_tasks=True,
        )
        mock_checks_location_task.reset_mock()
@@ -435,7 +426,6 @@ class TestPolicyViews(TacticalTestCase):
        mock_checks_location_task.assert_called_with(
            location={"site_id": site.id},
            mon_type="workstation",
            clear=True,
            create_tasks=True,
        )
        mock_checks_location_task.reset_mock()
@@ -444,7 +434,7 @@ class TestPolicyViews(TacticalTestCase):
        resp = self.client.post(url, agent_payload, format="json")
        self.assertEqual(resp.status_code, 200)
        # called because the relation changed
        mock_checks_task.assert_called_with(clear=True)
        mock_checks_task.assert_called()
        mock_checks_task.reset_mock()

        # adding the same relations shouldn't trigger mocks
@@ -753,7 +743,7 @@ class TestPolicyTasks(TacticalTestCase):
        agent = baker.make_recipe("agents.agent", site=site, policy=policy)

        # test policy assigned to agent
        generate_agent_checks_from_policies_task(policy.id, clear=True)
        generate_agent_checks_from_policies_task(policy.id)

        # make sure all checks were created. should be 7
        agent_checks = Agent.objects.get(pk=agent.id).agentchecks.all()
@@ -832,7 +822,6 @@ class TestPolicyTasks(TacticalTestCase):
        generate_agent_checks_by_location_task(
            {"site_id": sites[0].id},
            "server",
            clear=True,
            create_tasks=True,
        )

@@ -846,7 +835,6 @@ class TestPolicyTasks(TacticalTestCase):
        generate_agent_checks_by_location_task(
            {"site__client_id": clients[0].id},
            "workstation",
            clear=True,
            create_tasks=True,
        )
        # workstation_agent should now have policy checks and the other agents should not
@@ -875,7 +863,7 @@ class TestPolicyTasks(TacticalTestCase):
        core.workstation_policy = policy
        core.save()

        generate_all_agent_checks_task("server", clear=True, create_tasks=True)
        generate_all_agent_checks_task("server", create_tasks=True)

        # all servers should have 7 checks
        for agent in server_agents:
@@ -884,7 +872,7 @@ class TestPolicyTasks(TacticalTestCase):
        for agent in workstation_agents:
            self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)

        generate_all_agent_checks_task("workstation", clear=True, create_tasks=True)
        generate_all_agent_checks_task("workstation", create_tasks=True)

        # all agents should have 7 checks now
        for agent in server_agents:
@@ -961,7 +949,7 @@ class TestPolicyTasks(TacticalTestCase):
        site = baker.make("clients.Site")
        agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)

        generate_agent_tasks_from_policies_task(policy.id, clear=True)
        generate_agent_tasks_from_policies_task(policy.id)

        agent_tasks = Agent.objects.get(pk=agent.id).autotasks.all()

@@ -1000,9 +988,7 @@ class TestPolicyTasks(TacticalTestCase):
        agent1 = baker.make_recipe("agents.agent", site=sites[1])
        agent2 = baker.make_recipe("agents.agent", site=sites[3])

        generate_agent_tasks_by_location_task(
            {"site_id": sites[0].id}, "server", clear=True
        )
        generate_agent_tasks_by_location_task({"site_id": sites[0].id}, "server")

        # all servers in site1 and site2 should have 3 tasks
        self.assertEqual(
@@ -1013,7 +999,7 @@ class TestPolicyTasks(TacticalTestCase):
        self.assertEqual(Agent.objects.get(pk=agent2.id).autotasks.count(), 0)

        generate_agent_tasks_by_location_task(
            {"site__client_id": clients[0].id}, "workstation", clear=True
            {"site__client_id": clients[0].id}, "workstation"
        )

        # all workstations in Default1 should have 3 tasks
@@ -1051,10 +1037,13 @@ class TestPolicyTasks(TacticalTestCase):
        for task in tasks:
            run_win_task.assert_any_call(task.id)

    def test_update_policy_tasks(self):
    @patch("agents.models.Agent.nats_cmd")
    def test_update_policy_tasks(self, nats_cmd):
        from .tasks import update_policy_task_fields_task
        from autotasks.models import AutomatedTask

        nats_cmd.return_value = "ok"

        # setup data
        policy = baker.make("automation.Policy", active=True)
        tasks = baker.make(

@@ -83,7 +83,6 @@ class GetUpdateDeletePolicy(APIView):
|
||||
if saved_policy.active != old_active or saved_policy.enforced != old_enforced:
generate_agent_checks_from_policies_task.delay(
policypk=policy.pk,
clear=(not saved_policy.active or not saved_policy.enforced),
create_tasks=(saved_policy.active != old_active),
)

@@ -93,8 +92,8 @@ class GetUpdateDeletePolicy(APIView):
policy = get_object_or_404(Policy, pk=pk)

# delete all managed policy checks off of agents
generate_agent_checks_from_policies_task.delay(policypk=policy.pk, clear=True)
generate_agent_tasks_from_policies_task.delay(policypk=policy.pk, clear=True)
generate_agent_checks_from_policies_task.delay(policypk=policy.pk)
generate_agent_tasks_from_policies_task.delay(policypk=policy.pk)
policy.delete()

return Response("ok")
@@ -218,7 +217,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site__client_id": client.id},
mon_type="workstation",
clear=True,
create_tasks=True,
)

@@ -236,7 +234,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site_id": site.id},
mon_type="workstation",
clear=True,
create_tasks=True,
)

@@ -258,7 +255,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site__client_id": client.id},
mon_type="server",
clear=True,
create_tasks=True,
)

@@ -276,7 +272,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site_id": site.id},
mon_type="server",
clear=True,
create_tasks=True,
)

@@ -296,7 +291,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site__client_id": client.id},
mon_type="workstation",
clear=True,
create_tasks=True,
)

@@ -311,7 +305,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site_id": site.id},
mon_type="workstation",
clear=True,
create_tasks=True,
)

@@ -329,7 +322,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site__client_id": client.id},
mon_type="server",
clear=True,
create_tasks=True,
)

@@ -343,7 +335,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site_id": site.pk},
mon_type="server",
clear=True,
create_tasks=True,
)

@@ -358,14 +349,14 @@ class GetRelated(APIView):
if not agent.policy or agent.policy and agent.policy.pk != policy.pk:
agent.policy = policy
agent.save()
agent.generate_checks_from_policies(clear=True)
agent.generate_tasks_from_policies(clear=True)
agent.generate_checks_from_policies()
agent.generate_tasks_from_policies()
else:
if agent.policy:
agent.policy = None
agent.save()
agent.generate_checks_from_policies(clear=True)
agent.generate_tasks_from_policies(clear=True)
agent.generate_checks_from_policies()
agent.generate_tasks_from_policies()

return Response("ok")

@@ -422,11 +413,15 @@ class UpdatePatchPolicy(APIView):

agents = None
if "client" in request.data:
agents = Agent.objects.filter(site__client_id=request.data["client"])
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
site__client_id=request.data["client"]
)
elif "site" in request.data:
agents = Agent.objects.filter(site_id=request.data["site"])
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
site_id=request.data["site"]
)
else:
agents = Agent.objects.all()
agents = Agent.objects.prefetch_related("winupdatepolicy").only("pk")

for agent in agents:
winupdatepolicy = agent.winupdatepolicy.get()

@@ -7,7 +7,7 @@ class Command(BaseCommand):
help = "Checks for orphaned tasks on all agents and removes them"

def handle(self, *args, **kwargs):
agents = Agent.objects.all()
agents = Agent.objects.only("pk", "last_seen", "overdue_time")
online = [i for i in agents if i.status == "online"]
for agent in online:
remove_orphaned_win_tasks.delay(agent.pk)

@@ -0,0 +1,18 @@
# Generated by Django 3.1.3 on 2020-11-29 09:12

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("autotasks", "0008_auto_20201030_1515"),
]

operations = [
migrations.AddField(
model_name="automatedtask",
name="run_time_bit_weekdays",
field=models.IntegerField(blank=True, null=True),
),
]
@@ -0,0 +1,33 @@
from django.db import migrations
from tacticalrmm.utils import get_bit_days

DAYS_OF_WEEK = {
0: "Monday",
1: "Tuesday",
2: "Wednesday",
3: "Thursday",
4: "Friday",
5: "Saturday",
6: "Sunday",
}


def migrate_days(apps, schema_editor):
AutomatedTask = apps.get_model("autotasks", "AutomatedTask")
for task in AutomatedTask.objects.exclude(run_time_days__isnull=True).exclude(
run_time_days=[]
):
run_days = [DAYS_OF_WEEK.get(day) for day in task.run_time_days]
task.run_time_bit_weekdays = get_bit_days(run_days)
task.save(update_fields=["run_time_bit_weekdays"])


class Migration(migrations.Migration):

dependencies = [
("autotasks", "0009_automatedtask_run_time_bit_weekdays"),
]

operations = [
migrations.RunPython(migrate_days),
]
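Editor's note: get_bit_days comes from tacticalrmm.utils and its body is not shown anywhere in this diff. From its call sites (weekday names in, an integer out, and 127 standing for all seven days in the tests further down), it evidently packs the chosen weekdays into the bitmask format used by the Windows Task Scheduler weekly trigger. A minimal sketch under that assumption; the real helper's exact bit assignments may differ:

# Hypothetical sketch of get_bit_days; not code from this commit.
WEEK_DAYS = {
    "Sunday": 0x1,
    "Monday": 0x2,
    "Tuesday": 0x4,
    "Wednesday": 0x8,
    "Thursday": 0x10,
    "Friday": 0x20,
    "Saturday": 0x40,
}

def get_bit_days(days):
    # OR together one bit per selected weekday; all seven days == 127
    bit_days = 0
    for day in days:
        bit_days |= WEEK_DAYS[day]
    return bit_days

With this convention, get_bit_days(["Sunday", "Monday", "Friday"]) returns 0x1 | 0x2 | 0x20 = 35, and the full week yields 127, the value the test suite below asserts for run_time_bit_weekdays.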
@@ -6,8 +6,8 @@ import datetime as dt
from django.db import models
from django.contrib.postgres.fields import ArrayField
from django.db.models.fields import DateTimeField
from automation.models import Policy
from logs.models import BaseAuditModel
from tacticalrmm.utils import bitdays_to_string

RUN_TIME_DAY_CHOICES = [
(0, "Monday"),
@@ -42,7 +42,7 @@ class AutomatedTask(BaseAuditModel):
blank=True,
)
policy = models.ForeignKey(
Policy,
"automation.Policy",
related_name="autotasks",
null=True,
blank=True,
@@ -69,6 +69,8 @@ class AutomatedTask(BaseAuditModel):
on_delete=models.SET_NULL,
)
name = models.CharField(max_length=255)
run_time_bit_weekdays = models.IntegerField(null=True, blank=True)
# run_time_days is deprecated, use bit weekdays
run_time_days = ArrayField(
models.IntegerField(choices=RUN_TIME_DAY_CHOICES, null=True, blank=True),
null=True,
@@ -107,21 +109,12 @@ class AutomatedTask(BaseAuditModel):
elif self.task_type == "runonce":
return f'Run once on {self.run_time_date.strftime("%m/%d/%Y %I:%M%p")}'
elif self.task_type == "scheduled":
ret = []
for i in self.run_time_days:
for j in RUN_TIME_DAY_CHOICES:
if i in j:
ret.append(j[1][0:3])

run_time_nice = dt.datetime.strptime(
self.run_time_minute, "%H:%M"
).strftime("%I:%M %p")

if len(ret) == 7:
return f"Every day at {run_time_nice}"
else:
days = ",".join(ret)
return f"{days} at {run_time_nice}"
days = bitdays_to_string(self.run_time_bit_weekdays)
return f"{days} at {run_time_nice}"

@property
def last_run_as_timezone(self):
@@ -169,6 +162,7 @@ class AutomatedTask(BaseAuditModel):
name=self.name,
run_time_days=self.run_time_days,
run_time_minute=self.run_time_minute,
run_time_bit_weekdays=self.run_time_bit_weekdays,
run_time_date=self.run_time_date,
task_type=self.task_type,
win_task_name=self.win_task_name,

@@ -1,52 +1,37 @@
import asyncio
import datetime as dt
from loguru import logger
from tacticalrmm.celery import app
from django.conf import settings
import pytz
from django.utils import timezone as djangotime
from packaging import version as pyver

from .models import AutomatedTask
from logs.models import PendingAction

logger.configure(**settings.LOG_CONFIG)

DAYS_OF_WEEK = {
0: "Monday",
1: "Tuesday",
2: "Wednesday",
3: "Thursday",
4: "Friday",
5: "Saturday",
6: "Sunday",
}


@app.task
def create_win_task_schedule(pk, pending_action=False):
task = AutomatedTask.objects.get(pk=pk)

if task.task_type == "scheduled":
run_days = [DAYS_OF_WEEK.get(day) for day in task.run_time_days]

r = task.agent.salt_api_cmd(
timeout=20,
func="task.create_task",
arg=[
f"name={task.win_task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {task.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Weekly",
f'start_time="{task.run_time_minute}"',
"ac_only=False",
"stop_if_on_batteries=False",
],
kwargs={"days_of_week": run_days},
)
nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "weekly",
"weekdays": task.run_time_bit_weekdays,
"pk": task.pk,
"name": task.win_task_name,
"hour": dt.datetime.strptime(task.run_time_minute, "%H:%M").hour,
"min": dt.datetime.strptime(task.run_time_minute, "%H:%M").minute,
},
}

elif task.task_type == "runonce":

# check if scheduled time is in the past
agent_tz = pytz.timezone(task.agent.timezone)
task_time_utc = task.run_time_date.replace(tzinfo=agent_tz).astimezone(pytz.utc)
@@ -57,47 +42,48 @@ def create_win_task_schedule(pk, pending_action=False):
) + djangotime.timedelta(minutes=5)
task.save()

r = task.agent.salt_api_cmd(
timeout=20,
func="task.create_task",
arg=[
f"name={task.win_task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {task.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Once",
f'start_date="{task.run_time_date.strftime("%Y-%m-%d")}"',
f'start_time="{task.run_time_date.strftime("%H:%M")}"',
"ac_only=False",
"stop_if_on_batteries=False",
"start_when_available=True",
],
)
nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "once",
"pk": task.pk,
"name": task.win_task_name,
"year": int(dt.datetime.strftime(task.run_time_date, "%Y")),
"month": dt.datetime.strftime(task.run_time_date, "%B"),
"day": int(dt.datetime.strftime(task.run_time_date, "%d")),
"hour": int(dt.datetime.strftime(task.run_time_date, "%H")),
"min": int(dt.datetime.strftime(task.run_time_date, "%M")),
},
}

if task.remove_if_not_scheduled and pyver.parse(
task.agent.version
) >= pyver.parse("1.1.2"):
nats_data["schedtaskpayload"]["deleteafter"] = True

elif task.task_type == "checkfailure" or task.task_type == "manual":
r = task.agent.salt_api_cmd(
timeout=20,
func="task.create_task",
arg=[
f"name={task.win_task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {task.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Once",
'start_date="1975-01-01"',
'start_time="01:00"',
"ac_only=False",
"stop_if_on_batteries=False",
],
)
nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "manual",
"pk": task.pk,
"name": task.win_task_name,
},
}
else:
return "error"

if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))

if r != "ok":
# don't create pending action if this task was initiated by a pending action
if not pending_action:

# complete any other pending actions on agent with same task_id
task.agent.remove_matching_pending_task_actions(task.id)

PendingAction(
agent=task.agent,
action_type="taskaction",
@@ -129,13 +115,16 @@ def create_win_task_schedule(pk, pending_action=False):
def enable_or_disable_win_task(pk, action, pending_action=False):
task = AutomatedTask.objects.get(pk=pk)

r = task.agent.salt_api_cmd(
timeout=20,
func="task.edit_task",
arg=[f"name={task.win_task_name}", f"enabled={action}"],
)
nats_data = {
"func": "enableschedtask",
"schedtaskpayload": {
"name": task.win_task_name,
"enabled": action,
},
}
r = asyncio.run(task.agent.nats_cmd(nats_data))

if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
if r != "ok":
# don't create pending action if this task was initiated by a pending action
if not pending_action:
PendingAction(
@@ -150,9 +139,6 @@ def enable_or_disable_win_task(pk, action, pending_action=False):
task.sync_status = "notsynced"
task.save(update_fields=["sync_status"])

logger.error(
f"Unable to update the scheduled task {task.win_task_name} on {task.agent.hostname}. It will be updated when the agent checks in."
)
return

# clear pending action since it was successful
@@ -163,7 +149,7 @@ def enable_or_disable_win_task(pk, action, pending_action=False):

task.sync_status = "synced"
task.save(update_fields=["sync_status"])
logger.info(f"{task.agent.hostname} task {task.name} was edited.")

return "ok"


@@ -171,15 +157,19 @@ def enable_or_disable_win_task(pk, action, pending_action=False):
def delete_win_task_schedule(pk, pending_action=False):
task = AutomatedTask.objects.get(pk=pk)

r = task.agent.salt_api_cmd(
timeout=20,
func="task.delete_task",
arg=[f"name={task.win_task_name}"],
)
nats_data = {
"func": "delschedtask",
"schedtaskpayload": {"name": task.win_task_name},
}
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))

if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
if r != "ok" and "The system cannot find the file specified" not in r:
# don't create pending action if this task was initiated by a pending action
if not pending_action:

# complete any other pending actions on agent with same task_id
task.agent.remove_matching_pending_task_actions(task.id)

PendingAction(
agent=task.agent,
action_type="taskaction",
@@ -188,10 +178,7 @@ def delete_win_task_schedule(pk, pending_action=False):
task.sync_status = "pendingdeletion"
task.save(update_fields=["sync_status"])

logger.error(
f"Unable to delete scheduled task {task.win_task_name} on {task.agent.hostname}. It was marked pending deletion and will be removed when the agent checks in."
)
return
return "timeout"

# complete pending action since it was successful
if pending_action:
@@ -199,16 +186,17 @@ def delete_win_task_schedule(pk, pending_action=False):
pendingaction.status = "completed"
pendingaction.save(update_fields=["status"])

# complete any other pending actions on agent with same task_id
task.agent.remove_matching_pending_task_actions(task.id)

task.delete()
logger.info(f"{task.agent.hostname} task {task.name} was deleted.")
return "ok"


@app.task
def run_win_task(pk):
# TODO deprecated, remove this function once salt gone
task = AutomatedTask.objects.get(pk=pk)
r = task.agent.salt_api_async(func="task.run", arg=[f"name={task.win_task_name}"])
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
return "ok"


@@ -220,18 +208,9 @@ def remove_orphaned_win_tasks(agentpk):

logger.info(f"Orphaned task cleanup initiated on {agent.hostname}.")

r = agent.salt_api_cmd(
timeout=15,
func="task.list_tasks",
)
r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10))

if r == "timeout" or r == "error":
logger.error(
f"Unable to clean up scheduled tasks on {agent.hostname}. Agent might be offline"
)
return "errtimeout"

if not isinstance(r, list):
if not isinstance(r, list) and not r:  # empty list
logger.error(f"Unable to clean up scheduled tasks on {agent.hostname}: {r}")
return "notlist"

@@ -240,7 +219,8 @@ def remove_orphaned_win_tasks(agentpk):
exclude_tasks = (
"TacticalRMM_fixmesh",
"TacticalRMM_SchedReboot",
"TacticalRMM_saltwatchdog",  # will be implemented in future
"TacticalRMM_sync",
"TacticalRMM_agentupdate",
)

for task in r:
@@ -250,16 +230,16 @@ def remove_orphaned_win_tasks(agentpk):

if task.startswith("TacticalRMM_") and task not in agent_task_names:
# delete task since it doesn't exist in UI
ret = agent.salt_api_cmd(
timeout=20,
func="task.delete_task",
arg=[f"name={task}"],
)
if isinstance(ret, bool) and ret is True:
logger.info(f"Removed orphaned task {task} from {agent.hostname}")
else:
nats_data = {
"func": "delschedtask",
"schedtaskpayload": {"name": task},
}
ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
if ret != "ok":
logger.error(
f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}"
)
else:
logger.info(f"Removed orphaned task {task} from {agent.hostname}")

logger.info(f"Orphaned task cleanup finished on {agent.hostname}")

@@ -1,3 +1,4 @@
import datetime as dt
from unittest.mock import patch, call
from model_bakery import baker
from django.utils import timezone as djangotime
@@ -25,9 +26,9 @@ class TestAutotaskViews(TacticalTestCase):
# setup data
script = baker.make_recipe("scripts.script")
agent = baker.make_recipe("agents.agent")
agent_old = baker.make_recipe("agents.agent", version="0.9.0")
policy = baker.make("automation.Policy")
check = baker.make_recipe("checks.diskspace_check", agent=agent)
old_agent = baker.make_recipe("agents.agent", version="1.1.0")

# test script set to invalid pk
data = {"autotask": {"script": 500}}
@@ -50,10 +51,10 @@ class TestAutotaskViews(TacticalTestCase):
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 404)

# test invalid agent version
# test old agent version
data = {
"autotask": {"script": script.id, "script_args": ["args"]},
"agent": agent_old.id,
"autotask": {"script": script.id},
"agent": old_agent.id,
}

resp = self.client.post(url, data, format="json")
@@ -63,7 +64,7 @@ class TestAutotaskViews(TacticalTestCase):
data = {
"autotask": {
"name": "Test Task Scheduled with Assigned Check",
"run_time_days": [0, 1, 2],
"run_time_days": ["Sunday", "Monday", "Friday"],
"run_time_minute": "10:00",
"timeout": 120,
"enabled": True,
@@ -84,6 +85,7 @@ class TestAutotaskViews(TacticalTestCase):
data = {
"autotask": {
"name": "Test Task Manual",
"run_time_days": [],
"timeout": 120,
"enabled": True,
"script": script.id,
@@ -213,8 +215,8 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
self.authenticate()
self.setup_coresettings()

@patch("agents.models.Agent.salt_api_cmd")
def test_remove_orphaned_win_task(self, salt_api_cmd):
@patch("agents.models.Agent.nats_cmd")
def test_remove_orphaned_win_task(self, nats_cmd):
self.agent = baker.make_recipe("agents.agent")
self.task1 = AutomatedTask.objects.create(
agent=self.agent,
@@ -222,20 +224,6 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
win_task_name=AutomatedTask.generate_task_name(),
)

salt_api_cmd.return_value = "timeout"
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
self.assertEqual(ret.result, "errtimeout")

salt_api_cmd.return_value = "error"
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
self.assertEqual(ret.result, "errtimeout")

salt_api_cmd.return_value = "task not found in"
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
self.assertEqual(ret.result, "notlist")

salt_api_cmd.reset_mock()

# test removing an orphaned task
win_tasks = [
"Adobe Acrobat Update Task",
@@ -250,50 +238,54 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
]

self.calls = [
call(timeout=15, func="task.list_tasks"),
call({"func": "listschedtasks"}, timeout=10),
call(
timeout=20,
func="task.delete_task",
arg=["name=TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb"],
{
"func": "delschedtask",
"schedtaskpayload": {
"name": "TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb"
},
},
timeout=10,
),
]

salt_api_cmd.side_effect = [win_tasks, True]
nats_cmd.side_effect = [win_tasks, "ok"]
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
self.assertEqual(salt_api_cmd.call_count, 2)
salt_api_cmd.assert_has_calls(self.calls)
self.assertEqual(nats_cmd.call_count, 2)
nats_cmd.assert_has_calls(self.calls)
self.assertEqual(ret.status, "SUCCESS")

# test salt delete_task fail
salt_api_cmd.reset_mock()
salt_api_cmd.side_effect = [win_tasks, False]
# test nats delete task fail
nats_cmd.reset_mock()
nats_cmd.side_effect = [win_tasks, "error deleting task"]
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
salt_api_cmd.assert_has_calls(self.calls)
self.assertEqual(salt_api_cmd.call_count, 2)
nats_cmd.assert_has_calls(self.calls)
self.assertEqual(nats_cmd.call_count, 2)
self.assertEqual(ret.status, "SUCCESS")

# no orphaned tasks
salt_api_cmd.reset_mock()
nats_cmd.reset_mock()
win_tasks.remove("TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb")
salt_api_cmd.side_effect = [win_tasks, True]
nats_cmd.side_effect = [win_tasks, "ok"]
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
self.assertEqual(salt_api_cmd.call_count, 1)
self.assertEqual(nats_cmd.call_count, 1)
self.assertEqual(ret.status, "SUCCESS")

@patch("agents.models.Agent.salt_api_async")
def test_run_win_task(self, salt_api_async):
@patch("agents.models.Agent.nats_cmd")
def test_run_win_task(self, nats_cmd):
self.agent = baker.make_recipe("agents.agent")
self.task1 = AutomatedTask.objects.create(
agent=self.agent,
name="test task 1",
win_task_name=AutomatedTask.generate_task_name(),
)
salt_api_async.return_value = "Response 200"
nats_cmd.return_value = "ok"
ret = run_win_task.s(self.task1.pk).apply()
self.assertEqual(ret.status, "SUCCESS")

@patch("agents.models.Agent.salt_api_cmd")
def test_create_win_task_schedule(self, salt_api_cmd):
@patch("agents.models.Agent.nats_cmd")
def test_create_win_task_schedule(self, nats_cmd):
self.agent = baker.make_recipe("agents.agent")

task_name = AutomatedTask.generate_task_name()
@@ -303,46 +295,32 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
name="test task 1",
win_task_name=task_name,
task_type="scheduled",
run_time_days=[0, 1, 6],
run_time_bit_weekdays=127,
run_time_minute="21:55",
)
self.assertEqual(self.task1.sync_status, "notsynced")
salt_api_cmd.return_value = True
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
self.assertEqual(salt_api_cmd.call_count, 1)
salt_api_cmd.assert_called_with(
timeout=20,
func="task.create_task",
arg=[
f"name={task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {self.task1.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Weekly",
'start_time="21:55"',
"ac_only=False",
"stop_if_on_batteries=False",
],
kwargs={"days_of_week": ["Monday", "Tuesday", "Sunday"]},
self.assertEqual(nats_cmd.call_count, 1)
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "weekly",
"weekdays": 127,
"pk": self.task1.pk,
"name": task_name,
"hour": 21,
"min": 55,
},
},
timeout=10,
)
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
self.assertEqual(self.task1.sync_status, "synced")

salt_api_cmd.return_value = "timeout"
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
self.assertEqual(ret.status, "SUCCESS")
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
self.assertEqual(self.task1.sync_status, "notsynced")

salt_api_cmd.return_value = "error"
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
self.assertEqual(ret.status, "SUCCESS")
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
self.assertEqual(self.task1.sync_status, "notsynced")

salt_api_cmd.return_value = False
nats_cmd.return_value = "timeout"
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
self.assertEqual(ret.status, "SUCCESS")
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
@@ -353,7 +331,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
agent=self.agent, action_type="taskaction"
)
self.assertEqual(self.pending_action.status, "pending")
salt_api_cmd.return_value = True
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(
pk=self.task1.pk, pending_action=self.pending_action.pk
).apply()
@@ -362,7 +340,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
self.assertEqual(self.pending_action.status, "completed")

# test runonce with future date
salt_api_cmd.reset_mock()
nats_cmd.reset_mock()
task_name = AutomatedTask.generate_task_name()
run_time_date = djangotime.now() + djangotime.timedelta(hours=22)
self.task2 = AutomatedTask.objects.create(
@@ -372,30 +350,29 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
task_type="runonce",
run_time_date=run_time_date,
)
salt_api_cmd.return_value = True
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task2.pk, pending_action=False).apply()
salt_api_cmd.assert_called_with(
timeout=20,
func="task.create_task",
arg=[
f"name={task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {self.task2.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Once",
f'start_date="{run_time_date.strftime("%Y-%m-%d")}"',
f'start_time="{run_time_date.strftime("%H:%M")}"',
"ac_only=False",
"stop_if_on_batteries=False",
"start_when_available=True",
],
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "once",
"pk": self.task2.pk,
"name": task_name,
"year": int(dt.datetime.strftime(self.task2.run_time_date, "%Y")),
"month": dt.datetime.strftime(self.task2.run_time_date, "%B"),
"day": int(dt.datetime.strftime(self.task2.run_time_date, "%d")),
"hour": int(dt.datetime.strftime(self.task2.run_time_date, "%H")),
"min": int(dt.datetime.strftime(self.task2.run_time_date, "%M")),
},
},
timeout=10,
)
self.assertEqual(ret.status, "SUCCESS")

# test runonce with date in the past
salt_api_cmd.reset_mock()
nats_cmd.reset_mock()
task_name = AutomatedTask.generate_task_name()
run_time_date = djangotime.now() - djangotime.timedelta(days=13)
self.task3 = AutomatedTask.objects.create(
@@ -405,31 +382,13 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
task_type="runonce",
run_time_date=run_time_date,
)
salt_api_cmd.return_value = True
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task3.pk, pending_action=False).apply()
self.task3 = AutomatedTask.objects.get(pk=self.task3.pk)
salt_api_cmd.assert_called_with(
timeout=20,
func="task.create_task",
arg=[
f"name={task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {self.task3.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Once",
f'start_date="{self.task3.run_time_date.strftime("%Y-%m-%d")}"',
f'start_time="{self.task3.run_time_date.strftime("%H:%M")}"',
"ac_only=False",
"stop_if_on_batteries=False",
"start_when_available=True",
],
)
self.assertEqual(ret.status, "SUCCESS")

# test checkfailure
salt_api_cmd.reset_mock()
nats_cmd.reset_mock()
self.check = baker.make_recipe("checks.diskspace_check", agent=self.agent)
task_name = AutomatedTask.generate_task_name()
self.task4 = AutomatedTask.objects.create(
@@ -439,29 +398,24 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
task_type="checkfailure",
assigned_check=self.check,
)
salt_api_cmd.return_value = True
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task4.pk, pending_action=False).apply()
salt_api_cmd.assert_called_with(
timeout=20,
func="task.create_task",
arg=[
f"name={task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {self.task4.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Once",
'start_date="1975-01-01"',
'start_time="01:00"',
"ac_only=False",
"stop_if_on_batteries=False",
],
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "manual",
"pk": self.task4.pk,
"name": task_name,
},
},
timeout=10,
)
self.assertEqual(ret.status, "SUCCESS")

# test manual
salt_api_cmd.reset_mock()
nats_cmd.reset_mock()
task_name = AutomatedTask.generate_task_name()
self.task5 = AutomatedTask.objects.create(
agent=self.agent,
@@ -469,23 +423,18 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
win_task_name=task_name,
task_type="manual",
)
salt_api_cmd.return_value = True
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task5.pk, pending_action=False).apply()
salt_api_cmd.assert_called_with(
timeout=20,
func="task.create_task",
arg=[
f"name={task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {self.task5.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Once",
'start_date="1975-01-01"',
'start_time="01:00"',
"ac_only=False",
"stop_if_on_batteries=False",
],
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "manual",
"pk": self.task5.pk,
"name": task_name,
},
},
timeout=10,
)
self.assertEqual(ret.status, "SUCCESS")

@@ -20,7 +20,7 @@ from .tasks import (
delete_win_task_schedule,
enable_or_disable_win_task,
)
from tacticalrmm.utils import notify_error
from tacticalrmm.utils import notify_error, get_bit_days


class AddAutoTask(APIView):
@@ -38,17 +38,20 @@ class AddAutoTask(APIView):
parent = {"policy": policy}
else:
agent = get_object_or_404(Agent, pk=data["agent"])
if not agent.has_gotasks:
return notify_error("Requires agent version 1.1.1 or greater")

parent = {"agent": agent}
added = "0.11.0"
if data["autotask"]["script_args"] and agent.not_supported(added):
return notify_error(
f"Script arguments only available in agent {added} or greater"
)

check = None
if data["autotask"]["assigned_check"]:
check = get_object_or_404(Check, pk=data["autotask"]["assigned_check"])

bit_weekdays = None
if data["autotask"]["run_time_days"]:
bit_weekdays = get_bit_days(data["autotask"]["run_time_days"])

del data["autotask"]["run_time_days"]
serializer = TaskSerializer(data=data["autotask"], partial=True, context=parent)
serializer.is_valid(raise_exception=True)
obj = serializer.save(
@@ -56,6 +59,7 @@ class AddAutoTask(APIView):
script=script,
win_task_name=AutomatedTask.generate_task_name(),
assigned_check=check,
run_time_bit_weekdays=bit_weekdays,
)

if not "policy" in data:

@@ -1,5 +1,6 @@
from django.contrib import admin

from .models import Check
from .models import Check, CheckHistory

admin.site.register(Check)
admin.site.register(CheckHistory)

api/tacticalrmm/checks/migrations/0011_check_run_history.py (new file, 30 lines)
@@ -0,0 +1,30 @@
# Generated by Django 3.1.4 on 2021-01-09 02:56

import django.contrib.postgres.fields
from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("checks", "0010_auto_20200922_1344"),
]

operations = [
migrations.AddField(
model_name="check",
name="run_history",
field=django.contrib.postgres.fields.ArrayField(
base_field=django.contrib.postgres.fields.ArrayField(
base_field=models.PositiveIntegerField(),
blank=True,
null=True,
size=None,
),
blank=True,
default=list,
null=True,
size=None,
),
),
]

api/tacticalrmm/checks/migrations/0011_checkhistory.py (new file, 39 lines)
@@ -0,0 +1,39 @@
# Generated by Django 3.1.4 on 2021-01-09 21:36

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

dependencies = [
("checks", "0010_auto_20200922_1344"),
]

operations = [
migrations.CreateModel(
name="CheckHistory",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("x", models.DateTimeField()),
("y", models.PositiveIntegerField()),
("results", models.JSONField(blank=True, null=True)),
(
"check_history",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="check_history",
to="checks.check",
),
),
],
),
]

api/tacticalrmm/checks/migrations/0012_auto_20210110_0503.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 3.1.4 on 2021-01-10 05:03

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("checks", "0011_checkhistory"),
]

operations = [
migrations.AlterField(
model_name="checkhistory",
name="y",
field=models.PositiveIntegerField(blank=True, null=True),
),
]

api/tacticalrmm/checks/migrations/0013_auto_20210110_0505.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 3.1.4 on 2021-01-10 05:05

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("checks", "0012_auto_20210110_0503"),
]

operations = [
migrations.AlterField(
model_name="checkhistory",
name="y",
field=models.PositiveIntegerField(null=True),
),
]

api/tacticalrmm/checks/migrations/0014_merge_20210110_1808.py (new file, 13 lines)
@@ -0,0 +1,13 @@
# Generated by Django 3.1.4 on 2021-01-10 18:08

from django.db import migrations


class Migration(migrations.Migration):

dependencies = [
("checks", "0013_auto_20210110_0505"),
("checks", "0011_check_run_history"),
]

operations = []

api/tacticalrmm/checks/migrations/0015_auto_20210110_1808.py (new file, 27 lines)
@@ -0,0 +1,27 @@
# Generated by Django 3.1.4 on 2021-01-10 18:08

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("checks", "0014_merge_20210110_1808"),
]

operations = [
migrations.RemoveField(
model_name="check",
name="run_history",
),
migrations.AlterField(
model_name="checkhistory",
name="x",
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name="checkhistory",
name="y",
field=models.PositiveIntegerField(blank=True, default=None, null=True),
),
]
@@ -3,12 +3,13 @@ import string
import os
import json
import pytz
from statistics import mean
from statistics import mean, mode

from django.db import models
from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.core.validators import MinValueValidator, MaxValueValidator
from rest_framework.fields import JSONField

from core.models import CoreSettings
from logs.models import BaseAuditModel
@@ -214,6 +215,9 @@ class Check(BaseAuditModel):
"modified_time",
]

def add_check_history(self, value, more_info=None):
CheckHistory.objects.create(check_history=self, y=value, results=more_info)

def handle_checkv2(self, data):
# cpuload or mem checks
if self.check_type == "cpuload" or self.check_type == "memory":
@@ -232,6 +236,9 @@ class Check(BaseAuditModel):
else:
self.status = "passing"

# add check history
self.add_check_history(data["percent"])

# diskspace checks
elif self.check_type == "diskspace":
if data["exists"]:
@@ -245,6 +252,9 @@ class Check(BaseAuditModel):
self.status = "passing"

self.more_info = f"Total: {total}B, Free: {free}B"

# add check history
self.add_check_history(percent_used)
else:
self.status = "failing"
self.more_info = f"Disk {self.disk} does not exist"
@@ -277,6 +287,17 @@ class Check(BaseAuditModel):
]
)

# add check history
self.add_check_history(
1 if self.status == "failing" else 0,
{
"retcode": data["retcode"],
"stdout": data["stdout"][:60],
"stderr": data["stderr"][:60],
"execution_time": self.execution_time,
},
)

# ping checks
elif self.check_type == "ping":
success = ["Reply", "bytes", "time", "TTL"]
@@ -293,6 +314,10 @@ class Check(BaseAuditModel):
self.more_info = output
self.save(update_fields=["more_info"])

self.add_check_history(
1 if self.status == "failing" else 0, self.more_info[:60]
)

# windows service checks
elif self.check_type == "winsvc":
svc_stat = data["status"]
@@ -332,6 +357,10 @@ class Check(BaseAuditModel):

self.save(update_fields=["more_info"])

self.add_check_history(
1 if self.status == "failing" else 0, self.more_info[:60]
)

elif self.check_type == "eventlog":
log = []
is_wildcard = self.event_id_is_wildcard
@@ -391,6 +420,11 @@ class Check(BaseAuditModel):
self.extra_details = {"log": log}
self.save(update_fields=["extra_details"])

self.add_check_history(
1 if self.status == "failing" else 0,
"Events Found:" + str(len(self.extra_details["log"])),
)

# handle status
if self.status == "failing":
self.fail_count += 1
@@ -411,42 +445,6 @@ class Check(BaseAuditModel):

return self.status

def handle_check(self, data):
if self.check_type != "cpuload" and self.check_type != "memory":

if data["status"] == "passing" and self.fail_count != 0:
self.fail_count = 0
self.save(update_fields=["fail_count"])

elif data["status"] == "failing":
self.fail_count += 1
self.save(update_fields=["fail_count"])

else:
self.history.append(data["percent"])

if len(self.history) > 15:
self.history = self.history[-15:]

self.save(update_fields=["history"])

avg = int(mean(self.history))

if avg > self.threshold:
self.status = "failing"
self.fail_count += 1
self.save(update_fields=["status", "fail_count"])
else:
self.status = "passing"
if self.fail_count != 0:
self.fail_count = 0
self.save(update_fields=["status", "fail_count"])
else:
self.save(update_fields=["status"])

if self.email_alert and self.fail_count >= self.fails_b4_alert:
handle_check_email_alert_task.delay(self.pk)

@staticmethod
def serialize(check):
# serializes the check and returns json
@@ -645,3 +643,17 @@ class Check(BaseAuditModel):
body = subject

CORE.send_sms(body)


class CheckHistory(models.Model):
check_history = models.ForeignKey(
Check,
related_name="check_history",
on_delete=models.CASCADE,
)
x = models.DateTimeField(auto_now_add=True)
y = models.PositiveIntegerField(null=True, blank=True, default=None)
results = models.JSONField(null=True, blank=True)

def __str__(self):
return self.check_history.readable_desc

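Editor's note: the new CheckHistory model above turns each check run into one time-series point: x is stamped automatically on insert, y holds the numeric reading (a percentage for cpuload/memory/diskspace checks, 0/1 pass-fail for the rest), and results carries optional JSON context. An illustrative usage sketch, assuming an existing Check instance named check:

# Illustrative only; mirrors how handle_checkv2 records points above.
check.add_check_history(87)  # e.g. 87% CPU load on this run
check.add_check_history(
    1,  # failing
    {"retcode": 2, "stdout": "", "stderr": "boom", "execution_time": "1.2"},
)

# The FK's related_name "check_history" exposes the points for graphing,
# newest first, as the CheckHistory API view in this changeset does:
points = check.check_history.order_by("-x").values("x", "y", "results")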
@@ -1,8 +1,8 @@
import validators as _v

import pytz
from rest_framework import serializers

from .models import Check
from .models import Check, CheckHistory
from autotasks.models import AutomatedTask
from scripts.serializers import ScriptSerializer, ScriptCheckSerializer

@@ -65,6 +65,26 @@ class CheckSerializer(serializers.ModelSerializer):
"Please enter a valid IP address or domain name"
)

if check_type == "cpuload" and not self.instance:
if (
Check.objects.filter(**self.context, check_type="cpuload")
.exclude(managed_by_policy=True)
.exists()
):
raise serializers.ValidationError(
"A cpuload check for this agent already exists"
)

if check_type == "memory" and not self.instance:
if (
Check.objects.filter(**self.context, check_type="memory")
.exclude(managed_by_policy=True)
.exists()
):
raise serializers.ValidationError(
"A memory check for this agent already exists"
)

return val


@@ -75,101 +95,7 @@ class AssignedTaskCheckRunnerField(serializers.ModelSerializer):


class CheckRunnerGetSerializer(serializers.ModelSerializer):
# for the windows agent
# only send data needed for agent to run a check

assigned_task = serializers.SerializerMethodField()
script = ScriptSerializer(read_only=True)

def get_assigned_task(self, obj):
if obj.assignedtask.exists():
# this will not break agents on version 0.10.2 or lower
# newer agents once released will properly handle multiple tasks assigned to a check
task = obj.assignedtask.first()
return AssignedTaskCheckRunnerField(task).data

class Meta:
model = Check
exclude = [
"policy",
"managed_by_policy",
"overriden_by_policy",
"parent_check",
"name",
"more_info",
"last_run",
"email_alert",
"text_alert",
"fails_b4_alert",
"fail_count",
"email_sent",
"text_sent",
"outage_history",
"extra_details",
"stdout",
"stderr",
"retcode",
"execution_time",
"svc_display_name",
"svc_policy_mode",
"created_by",
"created_time",
"modified_by",
"modified_time",
"history",
]


class CheckRunnerGetSerializerV2(serializers.ModelSerializer):
# for the windows __python__ agent
# only send data needed for agent to run a check

assigned_tasks = serializers.SerializerMethodField()
script = ScriptSerializer(read_only=True)

def get_assigned_tasks(self, obj):
if obj.assignedtask.exists():
tasks = obj.assignedtask.all()
return AssignedTaskCheckRunnerField(tasks, many=True).data

class Meta:
model = Check
exclude = [
"policy",
"managed_by_policy",
"overriden_by_policy",
"parent_check",
"name",
"more_info",
"last_run",
"email_alert",
"text_alert",
"fails_b4_alert",
"fail_count",
"email_sent",
"text_sent",
"outage_history",
"extra_details",
"stdout",
"stderr",
"retcode",
"execution_time",
"svc_display_name",
"svc_policy_mode",
"created_by",
"created_time",
"modified_by",
"modified_time",
"history",
]


class CheckRunnerGetSerializerV3(serializers.ModelSerializer):
# for the windows __golang__ agent
# only send data needed for agent to run a check
# the difference here is in the script serializer
# script checks no longer rely on salt and are executed directly by the go agent

assigned_tasks = serializers.SerializerMethodField()
script = ScriptCheckSerializer(read_only=True)

@@ -217,3 +143,15 @@ class CheckResultsSerializer(serializers.ModelSerializer):
class Meta:
model = Check
fields = "__all__"


class CheckHistorySerializer(serializers.ModelSerializer):
x = serializers.SerializerMethodField()

def get_x(self, obj):
return obj.x.astimezone(pytz.timezone(self.context["timezone"])).isoformat()

# used for return large amounts of graph data
class Meta:
model = CheckHistory
fields = ("x", "y", "results")

@@ -5,8 +5,6 @@ from time import sleep
from tacticalrmm.celery import app
from django.utils import timezone as djangotime

from agents.models import Agent


@app.task
def handle_check_email_alert_task(pk):
@@ -56,3 +54,15 @@ def handle_check_sms_alert_task(pk):
check.save(update_fields=["text_sent"])

return "ok"


@app.task
def prune_check_history(older_than_days: int) -> str:
from .models import CheckHistory

CheckHistory.objects.filter(
x__lt=djangotime.make_aware(dt.datetime.today())
- djangotime.timedelta(days=older_than_days)
).delete()

return "ok"

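Editor's note: prune_check_history deletes every CheckHistory point whose x timestamp is older than older_than_days days; nothing in this hunk shows how the task gets scheduled. One plausible wiring, if it is meant to run periodically, is a Celery beat entry — hypothetical, not taken from this commit:

# Hypothetical beat schedule; the commit's actual scheduling is not shown here.
from celery.schedules import crontab

app.conf.beat_schedule = {
    "prune-check-history": {
        "task": "checks.tasks.prune_check_history",
        "schedule": crontab(minute=0, hour=4),  # daily at 04:00
        "args": (30,),  # keep 30 days of history
    },
}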
@@ -1,13 +1,16 @@
from checks.models import CheckHistory
from tacticalrmm.test import TacticalTestCase
from .serializers import CheckSerializer
from django.utils import timezone as djangotime
from unittest.mock import patch

from model_bakery import baker
from itertools import cycle


class TestCheckViews(TacticalTestCase):
def setUp(self):
self.authenticate()
self.setup_coresettings()

def test_get_disk_check(self):
# setup data
@@ -55,6 +58,52 @@ class TestCheckViews(TacticalTestCase):
resp = self.client.post(url, invalid_payload, format="json")
self.assertEqual(resp.status_code, 400)

def test_add_cpuload_check(self):
url = "/checks/checks/"
agent = baker.make_recipe("agents.agent")
payload = {
"pk": agent.pk,
"check": {
"check_type": "cpuload",
"threshold": 66,
"fails_b4_alert": 9,
},
}

resp = self.client.post(url, payload, format="json")
self.assertEqual(resp.status_code, 200)

payload["threshold"] = 87
resp = self.client.post(url, payload, format="json")
self.assertEqual(resp.status_code, 400)
self.assertEqual(
resp.json()["non_field_errors"][0],
"A cpuload check for this agent already exists",
)

def test_add_memory_check(self):
url = "/checks/checks/"
agent = baker.make_recipe("agents.agent")
payload = {
"pk": agent.pk,
"check": {
"check_type": "memory",
"threshold": 78,
"fails_b4_alert": 1,
},
}

resp = self.client.post(url, payload, format="json")
self.assertEqual(resp.status_code, 200)

payload["threshold"] = 55
resp = self.client.post(url, payload, format="json")
self.assertEqual(resp.status_code, 400)
self.assertEqual(
resp.json()["non_field_errors"][0],
"A memory check for this agent already exists",
)

def test_get_policy_disk_check(self):
# setup data
policy = baker.make("automation.Policy")
@@ -134,3 +183,111 @@ class TestCheckViews(TacticalTestCase):
self.assertEqual(resp.status_code, 200)

self.check_not_authenticated("patch", url_a)

@patch("agents.models.Agent.nats_cmd")
def test_run_checks(self, nats_cmd):
agent = baker.make_recipe("agents.agent", version="1.4.1")
agent_old = baker.make_recipe("agents.agent", version="1.0.2")
agent_b4_141 = baker.make_recipe("agents.agent", version="1.4.0")

url = f"/checks/runchecks/{agent_old.pk}/"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
self.assertEqual(r.json(), "Requires agent version 1.1.0 or greater")

url = f"/checks/runchecks/{agent_b4_141.pk}/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
nats_cmd.assert_called_with({"func": "runchecks"}, wait=False)

nats_cmd.reset_mock()
nats_cmd.return_value = "busy"
url = f"/checks/runchecks/{agent.pk}/"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
self.assertEqual(r.json(), f"Checks are already running on {agent.hostname}")

nats_cmd.reset_mock()
nats_cmd.return_value = "ok"
url = f"/checks/runchecks/{agent.pk}/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
self.assertEqual(r.json(), f"Checks will now be re-run on {agent.hostname}")

nats_cmd.reset_mock()
nats_cmd.return_value = "timeout"
url = f"/checks/runchecks/{agent.pk}/"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
self.assertEqual(r.json(), "Unable to contact the agent")

self.check_not_authenticated("get", url)

def test_get_check_history(self):
# setup data
agent = baker.make_recipe("agents.agent")
check = baker.make_recipe("checks.diskspace_check", agent=agent)
baker.make("checks.CheckHistory", check_history=check, _quantity=30)
check_history_data = baker.make(
"checks.CheckHistory",
check_history=check,
_quantity=30,
)

# need to manually set the date back 35 days
for check_history in check_history_data:
check_history.x = djangotime.now() - djangotime.timedelta(days=35)
check_history.save()

# test invalid check pk
resp = self.client.patch("/checks/history/500/", format="json")
self.assertEqual(resp.status_code, 404)

url = f"/checks/history/{check.id}/"

# test with timeFilter last 30 days
data = {"timeFilter": 30}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(resp.data), 30)

# test with timeFilter equal to 0
data = {"timeFilter": 0}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(resp.data), 60)

self.check_not_authenticated("patch", url)


class TestCheckTasks(TacticalTestCase):
def setUp(self):
self.setup_coresettings()

def test_prune_check_history(self):
from .tasks import prune_check_history

# setup data
check = baker.make_recipe("checks.diskspace_check")
baker.make("checks.CheckHistory", check_history=check, _quantity=30)
check_history_data = baker.make(
"checks.CheckHistory",
check_history=check,
_quantity=30,
)

# need to manually set the date back 35 days
for check_history in check_history_data:
check_history.x = djangotime.now() - djangotime.timedelta(days=35)
check_history.save()

# prune data 30 days old
prune_check_history(30)
self.assertEqual(CheckHistory.objects.count(), 30)

# prune all Check history Data
prune_check_history(0)
self.assertEqual(CheckHistory.objects.count(), 0)

@@ -7,4 +7,5 @@ urlpatterns = [
path("<pk>/loadchecks/", views.load_checks),
path("getalldisks/", views.get_disks_for_policies),
path("runchecks/<pk>/", views.run_checks),
path("history/<int:checkpk>/", views.CheckHistory.as_view()),
]

@@ -1,6 +1,11 @@
|
||||
import asyncio
|
||||
from packaging import version as pyver
|
||||
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.db.models import Q
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from datetime import datetime as dt
|
||||
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.response import Response
|
||||
@@ -13,7 +18,7 @@ from automation.models import Policy
|
||||
from .models import Check
|
||||
from scripts.models import Script
|
||||
|
||||
from .serializers import CheckSerializer
|
||||
from .serializers import CheckSerializer, CheckHistorySerializer
|
||||
|
||||
|
||||
from automation.tasks import (
|
||||
@@ -36,17 +41,6 @@ class AddCheck(APIView):
|
||||
else:
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
parent = {"agent": agent}
|
||||
added = "0.11.0"
|
||||
if (
|
||||
request.data["check"]["check_type"] == "script"
|
||||
and request.data["check"]["script_args"]
|
||||
and agent.not_supported(version_added=added)
|
||||
):
|
||||
return notify_error(
|
||||
{
|
||||
"non_field_errors": f"Script arguments only available in agent {added} or greater"
|
||||
}
|
||||
)
|
||||
|
||||
script = None
|
||||
if "script" in request.data["check"]:
|
||||
@@ -58,13 +52,6 @@ class AddCheck(APIView):
|
||||
request.data["check"]["check_type"] == "eventlog"
|
||||
and request.data["check"]["event_id_is_wildcard"]
|
||||
):
|
||||
if agent and agent.not_supported(version_added="0.10.2"):
|
||||
return notify_error(
|
||||
{
|
||||
"non_field_errors": "Wildcard is only available in agent 0.10.2 or greater"
|
||||
}
|
||||
)
|
||||
|
||||
request.data["check"]["event_id"] = 0
|
||||
|
||||
serializer = CheckSerializer(
|
||||
@@ -116,31 +103,8 @@ class GetUpdateDeleteCheck(APIView):
                pass
            else:
                if request.data["event_id_is_wildcard"]:
                    if check.agent.not_supported(version_added="0.10.2"):
                        return notify_error(
                            {
                                "non_field_errors": "Wildcard is only available in agent 0.10.2 or greater"
                            }
                        )

                    request.data["event_id"] = 0

        elif check.check_type == "script":
            added = "0.11.0"
            try:
                request.data["script_args"]
            except KeyError:
                pass
            else:
                if request.data["script_args"] and check.agent.not_supported(
                    version_added=added
                ):
                    return notify_error(
                        {
                            "non_field_errors": f"Script arguments only available in agent {added} or greater"
                        }
                    )

        serializer = CheckSerializer(instance=check, data=request.data, partial=True)
        serializer.is_valid(raise_exception=True)
        obj = serializer.save()
@@ -176,14 +140,46 @@
        return Response(f"{check.readable_desc} was deleted!")


class CheckHistory(APIView):
    def patch(self, request, checkpk):
        check = get_object_or_404(Check, pk=checkpk)

        timeFilter = Q()

        if "timeFilter" in request.data:
            if request.data["timeFilter"] != 0:
                timeFilter = Q(
                    x__lte=djangotime.make_aware(dt.today()),
                    x__gt=djangotime.make_aware(dt.today())
                    - djangotime.timedelta(days=request.data["timeFilter"]),
                )

        check_history = check.check_history.filter(timeFilter).order_by("-x")

        return Response(
            CheckHistorySerializer(
                check_history, context={"timezone": check.agent.timezone}, many=True
            ).data
        )


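A usage sketch for the new history endpoint, assuming the checks app's urlpatterns are mounted under /checks/ and token authentication (host, prefix, and token are placeholders, not from this diff):

import requests

base = "https://rmm.example.com"  # placeholder host
headers = {"Authorization": "Token <api-token>"}  # placeholder token

# Last 7 days of history for check 42; a timeFilter of 0 (or omitting it)
# returns the full history, newest first.
r = requests.patch(
    f"{base}/checks/history/42/",
    json={"timeFilter": 7},
    headers=headers,
)
print(r.json())  # list of serialized CheckHistory points
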
@api_view()
def run_checks(request, pk):
    agent = get_object_or_404(Agent, pk=pk)
    if not agent.has_nats:
        return notify_error("Requires agent version 1.1.0 or greater")

    asyncio.run(agent.nats_cmd({"func": "runchecks"}, wait=False))
    return Response(agent.hostname)
    if pyver.parse(agent.version) >= pyver.parse("1.4.1"):
        r = asyncio.run(agent.nats_cmd({"func": "runchecks"}, timeout=15))
        if r == "busy":
            return notify_error(f"Checks are already running on {agent.hostname}")
        elif r == "ok":
            return Response(f"Checks will now be re-run on {agent.hostname}")
        else:
            return notify_error("Unable to contact the agent")
    else:
        asyncio.run(agent.nats_cmd({"func": "runchecks"}, wait=False))
        return Response(f"Checks will now be re-run on {agent.hostname}")


@api_view()

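The 1.4.1 gate above relies on packaging.version rather than plain string comparison, which matters once a version component reaches two digits:

from packaging import version as pyver

# Lexically "1.10.0" < "1.4.1", which would gate newer agents incorrectly;
# packaging.version compares release segments numerically.
assert "1.10.0" < "1.4.1"
assert pyver.parse("1.10.0") > pyver.parse("1.4.1")
assert pyver.parse("1.4.1") >= pyver.parse("1.4.1")
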
@@ -6,48 +6,48 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('clients', '0004_auto_20200821_2115'),
        ("clients", "0004_auto_20200821_2115"),
    ]

    operations = [
        migrations.AddField(
            model_name='client',
            name='created_by',
            model_name="client",
            name="created_by",
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='client',
            name='created_time',
            model_name="client",
            name="created_time",
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AddField(
            model_name='client',
            name='modified_by',
            model_name="client",
            name="modified_by",
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='client',
            name='modified_time',
            model_name="client",
            name="modified_time",
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.AddField(
            model_name='site',
            name='created_by',
            model_name="site",
            name="created_by",
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='site',
            name='created_time',
            model_name="site",
            name="created_time",
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AddField(
            model_name='site',
            name='modified_by',
            model_name="site",
            name="modified_by",
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='site',
            name='modified_time',
            model_name="site",
            name="modified_time",
            field=models.DateTimeField(auto_now=True, null=True),
        ),
    ]

@@ -8,24 +8,67 @@ import uuid
class Migration(migrations.Migration):

    dependencies = [
        ('knox', '0007_auto_20190111_0542'),
        ('clients', '0005_auto_20200922_1344'),
        ("knox", "0007_auto_20190111_0542"),
        ("clients", "0005_auto_20200922_1344"),
    ]

    operations = [
        migrations.CreateModel(
            name='Deployment',
            name="Deployment",
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('uid', models.UUIDField(default=uuid.uuid4, editable=False)),
                ('mon_type', models.CharField(choices=[('server', 'Server'), ('workstation', 'Workstation')], default='server', max_length=255)),
                ('arch', models.CharField(choices=[('64', '64 bit'), ('32', '32 bit')], default='64', max_length=255)),
                ('expiry', models.DateTimeField(blank=True, null=True)),
                ('token_key', models.CharField(max_length=255)),
                ('install_flags', models.JSONField(blank=True, null=True)),
                ('auth_token', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deploytokens', to='knox.authtoken')),
                ('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deployclients', to='clients.client')),
                ('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deploysites', to='clients.site')),
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("uid", models.UUIDField(default=uuid.uuid4, editable=False)),
                (
                    "mon_type",
                    models.CharField(
                        choices=[("server", "Server"), ("workstation", "Workstation")],
                        default="server",
                        max_length=255,
                    ),
                ),
                (
                    "arch",
                    models.CharField(
                        choices=[("64", "64 bit"), ("32", "32 bit")],
                        default="64",
                        max_length=255,
                    ),
                ),
                ("expiry", models.DateTimeField(blank=True, null=True)),
                ("token_key", models.CharField(max_length=255)),
                ("install_flags", models.JSONField(blank=True, null=True)),
                (
                    "auth_token",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="deploytokens",
                        to="knox.authtoken",
                    ),
                ),
                (
                    "client",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="deployclients",
                        to="clients.client",
                    ),
                ),
                (
                    "site",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="deploysites",
                        to="clients.site",
                    ),
                ),
            ],
        ),
    ]

@@ -6,18 +6,18 @@ from django.db import migrations
class Migration(migrations.Migration):

    dependencies = [
        ('clients', '0006_deployment'),
        ("clients", "0006_deployment"),
    ]

    operations = [
        migrations.RenameField(
            model_name='client',
            old_name='client',
            new_name='name',
            model_name="client",
            old_name="client",
            new_name="name",
        ),
        migrations.RenameField(
            model_name='site',
            old_name='site',
            new_name='name',
            model_name="site",
            old_name="site",
            new_name="name",
        ),
    ]

@@ -6,16 +6,16 @@ from django.db import migrations
class Migration(migrations.Migration):

    dependencies = [
        ('clients', '0007_auto_20201102_1920'),
        ("clients", "0007_auto_20201102_1920"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='client',
            options={'ordering': ('name',)},
            name="client",
            options={"ordering": ("name",)},
        ),
        migrations.AlterModelOptions(
            name='site',
            options={'ordering': ('name',)},
            name="site",
            options={"ordering": ("name",)},
        ),
    ]

@@ -38,7 +38,6 @@ class Client(BaseAuditModel):

    @property
    def has_failing_checks(self):

        agents = (
            Agent.objects.only(
                "pk",
@@ -50,14 +49,17 @@ class Client(BaseAuditModel):
            .filter(site__client=self)
            .prefetch_related("agentchecks")
        )

        failing = 0
        for agent in agents:
            if agent.checks["has_failing_checks"]:
                return True
                failing += 1

            if agent.overdue_email_alert or agent.overdue_text_alert:
                return agent.status == "overdue"
            if agent.status == "overdue":
                failing += 1

        return False
        return failing > 0

    @staticmethod
    def serialize(client):
@@ -98,7 +100,6 @@ class Site(BaseAuditModel):

    @property
    def has_failing_checks(self):

        agents = (
            Agent.objects.only(
                "pk",
@@ -110,14 +111,17 @@ class Site(BaseAuditModel):
            .filter(site=self)
            .prefetch_related("agentchecks")
        )

        failing = 0
        for agent in agents:
            if agent.checks["has_failing_checks"]:
                return True
                failing += 1

            if agent.overdue_email_alert or agent.overdue_text_alert:
                return agent.status == "overdue"
            if agent.status == "overdue":
                failing += 1

        return False
        return failing > 0

    @staticmethod
    def serialize(site):

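The rewritten property no longer returns after the first agent it happens to inspect (the old overdue branch also answered for the whole client or site based on a single agent's alert flags); it now tallies every agent. A compact equivalent of the new semantics, assuming the same agent attributes used above:

def has_failing_checks(agents) -> bool:
    # True when ANY agent has a failing check or is overdue.
    return any(
        agent.checks["has_failing_checks"] or agent.status == "overdue"
        for agent in agents
    )
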
@@ -192,7 +192,7 @@ class GenerateAgent(APIView):
        if not os.path.exists(go_bin):
            return notify_error("Missing golang")

        api = f"{request.scheme}://{request.get_host()}"
        api = f"https://{request.get_host()}"
        inno = (
            f"winagent-v{settings.LATEST_AGENT_VER}.exe"
            if d.arch == "64"
@@ -223,7 +223,7 @@
            f"GOARCH={goarch}",
            go_bin,
            "build",
            f"-ldflags=\"-X 'main.Inno={inno}'",
            f"-ldflags=\"-s -w -X 'main.Inno={inno}'",
            f"-X 'main.Api={api}'",
            f"-X 'main.Client={d.client.pk}'",
            f"-X 'main.Site={d.site.pk}'",
@@ -282,4 +282,4 @@
        response = HttpResponse()
        response["Content-Disposition"] = f"attachment; filename={file_name}"
        response["X-Accel-Redirect"] = f"/private/exe/{file_name}"
        return response
        return response

@@ -56,8 +56,8 @@ func downloadAgent(filepath string) (err error) {
func main() {

	debugLog := flag.String("log", "", "Verbose output")
	localSalt := flag.String("local-salt", "", "Use local salt minion")
	localMesh := flag.String("local-mesh", "", "Use local mesh agent")
	silent := flag.Bool("silent", false, "Do not popup any message boxes during installation")
	cert := flag.String("cert", "", "Path to ca.pem")
	timeout := flag.String("timeout", "", "Timeout for subprocess calls")
	flag.Parse()
@@ -78,35 +78,35 @@ func main() {
	}

	if debug {
		cmdArgs = append(cmdArgs, "--log", "DEBUG")
		cmdArgs = append(cmdArgs, "-log", "debug")
	}

	if len(strings.TrimSpace(*localSalt)) != 0 {
		cmdArgs = append(cmdArgs, "--local-salt", *localSalt)
	if *silent {
		cmdArgs = append(cmdArgs, "-silent")
	}

	if len(strings.TrimSpace(*localMesh)) != 0 {
		cmdArgs = append(cmdArgs, "--local-mesh", *localMesh)
		cmdArgs = append(cmdArgs, "-local-mesh", *localMesh)
	}

	if len(strings.TrimSpace(*cert)) != 0 {
		cmdArgs = append(cmdArgs, "--cert", *cert)
		cmdArgs = append(cmdArgs, "-cert", *cert)
	}

	if len(strings.TrimSpace(*timeout)) != 0 {
		cmdArgs = append(cmdArgs, "--timeout", *timeout)
		cmdArgs = append(cmdArgs, "-timeout", *timeout)
	}

	if Rdp == "1" {
		cmdArgs = append(cmdArgs, "--rdp")
		cmdArgs = append(cmdArgs, "-rdp")
	}

	if Ping == "1" {
		cmdArgs = append(cmdArgs, "--ping")
		cmdArgs = append(cmdArgs, "-ping")
	}

	if Power == "1" {
		cmdArgs = append(cmdArgs, "--power")
		cmdArgs = append(cmdArgs, "-power")
	}

	if debug {
@@ -133,7 +133,7 @@ func main() {
		os.Exit(1)
	}

	time.Sleep(20 * time.Second)
	time.Sleep(10 * time.Second)

	fmt.Println("Installation starting.")
	cmd := exec.Command(tacrmm, cmdArgs...)

@@ -36,7 +36,7 @@ If (Get-Service $serviceName -ErrorAction SilentlyContinue) {
    Invoke-WebRequest -Uri $downloadlink -OutFile $OutPath\$output
    Start-Process -FilePath $OutPath\$output -ArgumentList ('/VERYSILENT /SUPPRESSMSGBOXES') -Wait
    write-host ('Extracting...')
    Start-Sleep -s 20
    Start-Sleep -s 10
    Start-Process -FilePath "C:\Program Files\TacticalAgent\tacticalrmm.exe" -ArgumentList $installArgs -Wait
    exit 0
}

@@ -11,12 +11,11 @@ class Command(BaseCommand):
    help = "Sets up initial mesh central configuration"

    async def websocket_call(self, mesh_settings):
        token = get_auth_token(
            mesh_settings.mesh_username, mesh_settings.mesh_token
        )
        token = get_auth_token(mesh_settings.mesh_username, mesh_settings.mesh_token)

        if settings.MESH_WS_URL:
            uri = f"{settings.MESH_WS_URL}/control.ashx?auth={token}"
        if settings.DOCKER_BUILD:
            site = mesh_settings.mesh_site.replace("https", "ws")
            uri = f"{site}:443/control.ashx?auth={token}"
        else:
            site = mesh_settings.mesh_site.replace("https", "wss")
            uri = f"{site}/control.ashx?auth={token}"

@@ -12,12 +12,11 @@ class Command(BaseCommand):

    async def websocket_call(self, mesh_settings):

        token = get_auth_token(
            mesh_settings.mesh_username, mesh_settings.mesh_token
        )
        token = get_auth_token(mesh_settings.mesh_username, mesh_settings.mesh_token)

        if settings.MESH_WS_URL:
            uri = f"{settings.MESH_WS_URL}/control.ashx?auth={token}"
        if settings.DOCKER_BUILD:
            site = mesh_settings.mesh_site.replace("https", "ws")
            uri = f"{site}:443/control.ashx?auth={token}"
        else:
            site = mesh_settings.mesh_site.replace("https", "wss")
            uri = f"{site}/control.ashx?auth={token}"
@@ -52,11 +51,17 @@ class Command(BaseCommand):

        try:
            # Check for Mesh Username
            if not mesh_settings.mesh_username or settings.MESH_USERNAME != mesh_settings.mesh_username:
            if (
                not mesh_settings.mesh_username
                or settings.MESH_USERNAME != mesh_settings.mesh_username
            ):
                mesh_settings.mesh_username = settings.MESH_USERNAME

            # Check for Mesh Site
            if not mesh_settings.mesh_site or settings.MESH_SITE != mesh_settings.mesh_site:
            if (
                not mesh_settings.mesh_site
                or settings.MESH_SITE != mesh_settings.mesh_site
            ):
                mesh_settings.mesh_site = settings.MESH_SITE

            # Check for Mesh Token
@@ -75,7 +80,9 @@ class Command(BaseCommand):
            return

        try:
            asyncio.get_event_loop().run_until_complete(self.websocket_call(mesh_settings))
            asyncio.get_event_loop().run_until_complete(
                self.websocket_call(mesh_settings)
            )
            self.stdout.write("Initial Mesh Central setup complete")
        except websockets.exceptions.ConnectionClosedError:
            self.stdout.write(

@@ -1,9 +1,7 @@
import os
import shutil
import subprocess
import sys
import tempfile
from time import sleep

from django.core.management.base import BaseCommand

@@ -15,22 +13,10 @@ class Command(BaseCommand):
    help = "Collection of tasks to run after updating the rmm, after migrations"

    def handle(self, *args, **kwargs):

        if not os.path.exists("/usr/local/bin/goversioninfo"):
            self.stdout.write(self.style.ERROR("*" * 100))
            self.stdout.write("\n")
            self.stdout.write(
                self.style.ERROR(
                    "ERROR: New update script available. Delete this one and re-download."
                )
            )
            self.stdout.write("\n")
            sys.exit(1)

        # 10-16-2020 changed the type of the agent's 'disks' model field
        # from a dict of dicts, to a list of disks in the golang agent
        # the following will convert dicts to lists for agents still on the python agent
        agents = Agent.objects.all()
        agents = Agent.objects.only("pk", "disks")
        for agent in agents:
            if agent.disks is not None and isinstance(agent.disks, dict):
                new = []
@@ -43,88 +29,17 @@ class Command(BaseCommand):
                    self.style.SUCCESS(f"Migrated disks on {agent.hostname}")
                )

        # sync modules. split into chunks of 60 agents to not overload the salt master
        agents = Agent.objects.all()
        online = [i.salt_id for i in agents if i.status == "online"]

        chunks = (online[i : i + 60] for i in range(0, len(online), 60))

        self.stdout.write(self.style.SUCCESS("Syncing agent modules..."))
        for chunk in chunks:
            r = Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
            sleep(5)

        has_old_config = True
        rmm_conf = "/etc/nginx/sites-available/rmm.conf"
        if os.path.exists(rmm_conf):
            with open(rmm_conf) as f:
                for line in f:
                    if "location" in line and "builtin" in line:
                        has_old_config = False
                        break

        if has_old_config:
            new_conf = """
            location /builtin/ {
                internal;
                add_header "Access-Control-Allow-Origin" "https://rmm.yourwebsite.com";
                alias /srv/salt/scripts/;
            }
            """

            after_this = """
            location /saltscripts/ {
                internal;
                add_header "Access-Control-Allow-Origin" "https://rmm.yourwebsite.com";
                alias /srv/salt/scripts/userdefined/;
            }
            """

            self.stdout.write(self.style.ERROR("*" * 100))
            self.stdout.write("\n")
            self.stdout.write(
                self.style.ERROR(
                    "WARNING: A recent update requires you to manually edit your nginx config"
                )
            )
            self.stdout.write("\n")
            self.stdout.write(
                self.style.ERROR("Please add the following location block to ")
                + self.style.WARNING(rmm_conf)
            )
            self.stdout.write(self.style.SUCCESS(new_conf))
            self.stdout.write("\n")
            self.stdout.write(
                self.style.ERROR(
                    "You can paste the above right after the following block that's already in your nginx config:"
                )
            )
            self.stdout.write(after_this)
            self.stdout.write("\n")
            self.stdout.write(
                self.style.ERROR(
                    "Make sure to replace rmm.yourwebsite.com with your domain"
                )
            )
            self.stdout.write(
                self.style.ERROR("After editing, restart nginx with the command ")
                + self.style.WARNING("sudo systemctl restart nginx")
            )
            self.stdout.write("\n")
            self.stdout.write(self.style.ERROR("*" * 100))
            input("Press Enter to continue...")

        # install go
        if not os.path.exists("/usr/local/rmmgo/"):
            self.stdout.write(self.style.SUCCESS("Installing golang"))
            subprocess.run("sudo mkdir -p /usr/local/rmmgo", shell=True)
            tmpdir = tempfile.mkdtemp()
            r = subprocess.run(
                f"wget https://golang.org/dl/go1.15.linux-amd64.tar.gz -P {tmpdir}",
                f"wget https://golang.org/dl/go1.15.5.linux-amd64.tar.gz -P {tmpdir}",
                shell=True,
            )

            gotar = os.path.join(tmpdir, "go1.15.linux-amd64.tar.gz")
            gotar = os.path.join(tmpdir, "go1.15.5.linux-amd64.tar.gz")

            subprocess.run(f"tar -xzf {gotar} -C {tmpdir}", shell=True)


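Note on the nginx-detection condition in this hunk: it was committed as `if "location" and "builtin" in line:` and is shown corrected above. A non-empty string literal is always truthy, so the original expression reduced to `"builtin" in line` alone. A quick demonstration:

line = "builtin, but no loc-word here"
print("location" and "builtin" in line)          # True: evaluates ("builtin" in line) only
print("location" in line and "builtin" in line)  # False: both substrings are required
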
@@ -6,4 +6,4 @@ class Command(BaseCommand):
    help = "Reload Nats"

    def handle(self, *args, **kwargs):
        reload_nats()
        reload_nats()

@@ -6,13 +6,13 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0008_auto_20200910_1434'),
        ("core", "0008_auto_20200910_1434"),
    ]

    operations = [
        migrations.AddField(
            model_name='coresettings',
            name='agent_auto_update',
            model_name="coresettings",
            name="agent_auto_update",
            field=models.BooleanField(default=True),
        ),
    ]

@@ -6,28 +6,28 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0009_coresettings_agent_auto_update'),
        ("core", "0009_coresettings_agent_auto_update"),
    ]

    operations = [
        migrations.AddField(
            model_name='coresettings',
            name='created_by',
            model_name="coresettings",
            name="created_by",
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='coresettings',
            name='created_time',
            model_name="coresettings",
            name="created_time",
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AddField(
            model_name='coresettings',
            name='modified_by',
            model_name="coresettings",
            name="modified_by",
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='coresettings',
            name='modified_time',
            model_name="coresettings",
            name="modified_time",
            field=models.DateTimeField(auto_now=True, null=True),
        ),
    ]

@@ -7,28 +7,34 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('core', '0010_auto_20201002_1257'),
        ("core", "0010_auto_20201002_1257"),
    ]

    operations = [
        migrations.AddField(
            model_name='coresettings',
            name='sms_alert_recipients',
            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None),
            model_name="coresettings",
            name="sms_alert_recipients",
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.CharField(blank=True, max_length=255, null=True),
                blank=True,
                default=list,
                null=True,
                size=None,
            ),
        ),
        migrations.AddField(
            model_name='coresettings',
            name='twilio_account_sid',
            model_name="coresettings",
            name="twilio_account_sid",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='coresettings',
            name='twilio_auth_token',
            model_name="coresettings",
            name="twilio_auth_token",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='coresettings',
            name='twilio_number',
            model_name="coresettings",
            name="twilio_number",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]

@@ -0,0 +1,18 @@
# Generated by Django 3.1.4 on 2021-01-10 18:08

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("core", "0011_auto_20201026_0719"),
    ]

    operations = [
        migrations.AddField(
            model_name="coresettings",
            name="check_history_prune_days",
            field=models.PositiveIntegerField(default=30),
        ),
    ]
@@ -49,6 +49,8 @@ class CoreSettings(BaseAuditModel):
    default_time_zone = models.CharField(
        max_length=255, choices=TZ_CHOICES, default="America/Los_Angeles"
    )
    # removes check history older than this many days
    check_history_prune_days = models.PositiveIntegerField(default=30)
    mesh_token = models.CharField(max_length=255, null=True, blank=True, default="")
    mesh_username = models.CharField(max_length=255, null=True, blank=True, default="")
    mesh_site = models.CharField(max_length=255, null=True, blank=True, default="")

@@ -4,8 +4,10 @@ from loguru import logger
from django.conf import settings
from django.utils import timezone as djangotime
from tacticalrmm.celery import app
from core.models import CoreSettings
from autotasks.models import AutomatedTask
from autotasks.tasks import delete_win_task_schedule
from checks.tasks import prune_check_history

logger.configure(**settings.LOG_CONFIG)

@@ -25,3 +27,7 @@ def core_maintenance_tasks():

    if now > task_time_utc:
        delete_win_task_schedule.delay(task.pk)

    # remove old CheckHistory data
    older_than = CoreSettings.objects.first().check_history_prune_days
    prune_check_history.delay(older_than)

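core_maintenance_tasks is presumably invoked periodically by celery beat; a hypothetical schedule entry (the entry name and interval are illustrative, not from this diff):

from tacticalrmm.celery import app

app.conf.beat_schedule = {
    "core-maintenance": {  # hypothetical entry name
        "task": "core.tasks.core_maintenance_tasks",
        "schedule": 60 * 60,  # run hourly, for illustration
    },
}
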
@@ -1,5 +1,8 @@
from tacticalrmm.test import TacticalTestCase
from core.tasks import core_maintenance_tasks
from unittest.mock import patch
from core.models import CoreSettings
from model_bakery import baker, seq


class TestCoreTasks(TacticalTestCase):
@@ -31,3 +34,95 @@ class TestCoreTasks(TacticalTestCase):
        self.assertEqual(r.status_code, 200)

        self.check_not_authenticated("get", url)

    @patch("automation.tasks.generate_all_agent_checks_task.delay")
    def test_edit_coresettings(self, generate_all_agent_checks_task):
        url = "/core/editsettings/"

        # setup
        policies = baker.make("Policy", _quantity=2)
        # test normal request
        data = {
            "smtp_from_email": "newexample@example.com",
            "mesh_token": "New_Mesh_Token",
        }
        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(
            CoreSettings.objects.first().smtp_from_email, data["smtp_from_email"]
        )
        self.assertEqual(CoreSettings.objects.first().mesh_token, data["mesh_token"])

        generate_all_agent_checks_task.assert_not_called()

        # test adding policy
        data = {
            "workstation_policy": policies[0].id,
            "server_policy": policies[1].id,
        }
        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(CoreSettings.objects.first().server_policy.id, policies[1].id)
        self.assertEqual(
            CoreSettings.objects.first().workstation_policy.id, policies[0].id
        )

        self.assertEqual(generate_all_agent_checks_task.call_count, 2)

        generate_all_agent_checks_task.reset_mock()

        # test remove policy
        data = {
            "workstation_policy": "",
        }
        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(CoreSettings.objects.first().workstation_policy, None)

        self.assertEqual(generate_all_agent_checks_task.call_count, 1)

        self.check_not_authenticated("patch", url)

    @patch("tacticalrmm.utils.reload_nats")
    @patch("autotasks.tasks.remove_orphaned_win_tasks.delay")
    def test_ui_maintenance_actions(self, remove_orphaned_win_tasks, reload_nats):
        url = "/core/servermaintenance/"

        agents = baker.make_recipe("agents.online_agent", _quantity=3)

        # test with empty data
        r = self.client.post(url, {})
        self.assertEqual(r.status_code, 400)

        # test with invalid action
        data = {"action": "invalid_action"}

        r = self.client.post(url, data)
        self.assertEqual(r.status_code, 400)

        # test reload nats action
        data = {"action": "reload_nats"}
        r = self.client.post(url, data)
        self.assertEqual(r.status_code, 200)
        reload_nats.assert_called_once()

        # test prune db with no tables
        data = {"action": "prune_db"}
        r = self.client.post(url, data)
        self.assertEqual(r.status_code, 400)

        # test prune db with tables
        data = {
            "action": "prune_db",
            "prune_tables": ["audit_logs", "agent_outages", "pending_actions"],
        }
        r = self.client.post(url, data)
        self.assertEqual(r.status_code, 200)

        # test remove orphaned tasks
        data = {"action": "rm_orphaned_tasks"}
        r = self.client.post(url, data)
        self.assertEqual(r.status_code, 200)
        remove_orphaned_win_tasks.assert_called()

        self.check_not_authenticated("post", url)

@@ -8,4 +8,5 @@ urlpatterns = [
    path("version/", views.version),
    path("emailtest/", views.email_test),
    path("dashinfo/", views.dashboard_info),
    path("servermaintenance/", views.server_maintenance),
]

@@ -42,21 +42,19 @@ def get_core_settings(request):

@api_view(["PATCH"])
def edit_settings(request):
    settings = CoreSettings.objects.first()
    serializer = CoreSettingsSerializer(instance=settings, data=request.data)
    coresettings = CoreSettings.objects.first()
    old_server_policy = coresettings.server_policy
    old_workstation_policy = coresettings.workstation_policy
    serializer = CoreSettingsSerializer(instance=coresettings, data=request.data)
    serializer.is_valid(raise_exception=True)
    new_settings = serializer.save()

    # check if default policies changed
    if settings.server_policy != new_settings.server_policy:
        generate_all_agent_checks_task.delay(
            mon_type="server", clear=True, create_tasks=True
        )
    if old_server_policy != new_settings.server_policy:
        generate_all_agent_checks_task.delay(mon_type="server", create_tasks=True)

    if settings.workstation_policy != new_settings.workstation_policy:
        generate_all_agent_checks_task.delay(
            mon_type="workstation", clear=True, create_tasks=True
        )
    if old_workstation_policy != new_settings.workstation_policy:
        generate_all_agent_checks_task.delay(mon_type="workstation", create_tasks=True)

    return Response("ok")

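The snapshot of the old policy values before save() is the point of this rewrite: with DRF's ModelSerializer, serializer.save() updates and returns the very instance it was bound to, so the old code compared an object with itself and the dispatch branches could never fire. In sketch form:

coresettings = CoreSettings.objects.first()
serializer = CoreSettingsSerializer(instance=coresettings, data=request.data)
serializer.is_valid(raise_exception=True)
new_settings = serializer.save()
assert new_settings is coresettings  # same object; pre-save values are gone
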
@@ -69,7 +67,13 @@ def version(request):

@api_view()
def dashboard_info(request):
    return Response(
        {"trmm_version": settings.TRMM_VERSION, "dark_mode": request.user.dark_mode}
        {
            "trmm_version": settings.TRMM_VERSION,
            "dark_mode": request.user.dark_mode,
            "show_community_scripts": request.user.show_community_scripts,
            "dbl_click_action": request.user.agent_dblclick_action,
            "default_agent_tbl_tab": request.user.default_agent_tbl_tab,
        }
    )


@@ -84,3 +88,56 @@ def email_test(request):
        return notify_error(r)

    return Response("Email Test OK!")


@api_view(["POST"])
def server_maintenance(request):
    from tacticalrmm.utils import reload_nats

    if "action" not in request.data:
        return notify_error("The data is incorrect")

    if request.data["action"] == "reload_nats":
        reload_nats()
        return Response("Nats configuration was reloaded successfully.")

    if request.data["action"] == "rm_orphaned_tasks":
        from agents.models import Agent
        from autotasks.tasks import remove_orphaned_win_tasks

        agents = Agent.objects.only("pk", "last_seen", "overdue_time")
        online = [i for i in agents if i.status == "online"]
        for agent in online:
            remove_orphaned_win_tasks.delay(agent.pk)

        return Response(
            "The task has been initiated. Check the Debug Log in the UI for progress."
        )

    if request.data["action"] == "prune_db":
        from agents.models import AgentOutage
        from logs.models import AuditLog, PendingAction

        if "prune_tables" not in request.data:
            return notify_error("The data is incorrect.")

        tables = request.data["prune_tables"]
        records_count = 0
        if "agent_outages" in tables:
            agentoutages = AgentOutage.objects.exclude(recovery_time=None)
            records_count += agentoutages.count()
            agentoutages.delete()

        if "audit_logs" in tables:
            auditlogs = AuditLog.objects.filter(action="check_run")
            records_count += auditlogs.count()
            auditlogs.delete()

        if "pending_actions" in tables:
            pendingactions = PendingAction.objects.filter(status="completed")
            records_count += pendingactions.count()
            pendingactions.delete()

        return Response(f"{records_count} records were pruned from the database")

    return notify_error("The data is incorrect")

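A usage sketch for the maintenance endpoint, matching the urls.py hunk above (host and token are placeholders, not from this diff):

import requests

base = "https://rmm.example.com"  # placeholder host
headers = {"Authorization": "Token <api-token>"}  # placeholder token

# Prune recovered outages, check-run audit logs, and completed pending actions:
r = requests.post(
    f"{base}/core/servermaintenance/",
    json={
        "action": "prune_db",
        "prune_tables": ["audit_logs", "agent_outages", "pending_actions"],
    },
    headers=headers,
)
print(r.text)  # e.g. "42 records were pruned from the database"
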
@@ -6,13 +6,28 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('logs', '0007_auditlog_debug_info'),
        ("logs", "0007_auditlog_debug_info"),
    ]

    operations = [
        migrations.AlterField(
            model_name='auditlog',
            name='action',
            field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command')], max_length=100),
            model_name="auditlog",
            name="action",
            field=models.CharField(
                choices=[
                    ("login", "User Login"),
                    ("failed_login", "Failed User Login"),
                    ("delete", "Delete Object"),
                    ("modify", "Modify Object"),
                    ("add", "Add Object"),
                    ("view", "View Object"),
                    ("check_run", "Check Run"),
                    ("task_run", "Task Run"),
                    ("remote_session", "Remote Session"),
                    ("execute_script", "Execute Script"),
                    ("execute_command", "Execute Command"),
                ],
                max_length=100,
            ),
        ),
    ]

@@ -6,13 +6,29 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('logs', '0008_auto_20201110_1431'),
        ("logs", "0008_auto_20201110_1431"),
    ]

    operations = [
        migrations.AlterField(
            model_name='auditlog',
            name='action',
            field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('agent_install', 'Agent Install'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command')], max_length=100),
            model_name="auditlog",
            name="action",
            field=models.CharField(
                choices=[
                    ("login", "User Login"),
                    ("failed_login", "Failed User Login"),
                    ("delete", "Delete Object"),
                    ("modify", "Modify Object"),
                    ("add", "Add Object"),
                    ("view", "View Object"),
                    ("check_run", "Check Run"),
                    ("task_run", "Task Run"),
                    ("agent_install", "Agent Install"),
                    ("remote_session", "Remote Session"),
                    ("execute_script", "Execute Script"),
                    ("execute_command", "Execute Command"),
                ],
                max_length=100,
            ),
        ),
    ]
Some files were not shown because too many files have changed in this diff.