Compare commits
338 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6a55ca20f3 | ||
|
|
c56c537f7f | ||
|
|
fd7d776121 | ||
|
|
1af28190d8 | ||
|
|
6b305be567 | ||
|
|
3bf70513b7 | ||
|
|
7e64404654 | ||
|
|
e1b5226f34 | ||
|
|
0d7128ad31 | ||
|
|
5778626087 | ||
|
|
3ff48756ed | ||
|
|
0ce9a6eeba | ||
|
|
ad527b4aed | ||
|
|
6633bb452e | ||
|
|
efeb0b4feb | ||
|
|
8cc11fc102 | ||
|
|
ee6a167220 | ||
|
|
8d4ad3c405 | ||
|
|
072fbf4d60 | ||
|
|
727c41c283 | ||
|
|
e2266838b6 | ||
|
|
775762d615 | ||
|
|
900c3008cb | ||
|
|
09379213a6 | ||
|
|
ceb97048e3 | ||
|
|
4561515517 | ||
|
|
a7b285759f | ||
|
|
b4531b2a12 | ||
|
|
9e1d261c76 | ||
|
|
e35fa15cd2 | ||
|
|
dbd1f0d4f9 | ||
|
|
9ade78b703 | ||
|
|
f20e244b5f | ||
|
|
0989308b7e | ||
|
|
12c7140536 | ||
|
|
2a0b605e92 | ||
|
|
6978890e6a | ||
|
|
561abd6cb9 | ||
|
|
4dd6227f0b | ||
|
|
1ec314c31c | ||
|
|
a2be5a00be | ||
|
|
4e2241c115 | ||
|
|
8459bca64a | ||
|
|
24cb0565b9 | ||
|
|
9442acb028 | ||
|
|
4f7f181a42 | ||
|
|
b7dd8737a7 | ||
|
|
2207eeb727 | ||
|
|
89dad7dfe7 | ||
|
|
e5803d0cf3 | ||
|
|
c1fffe9ae6 | ||
|
|
9e6cbd3d32 | ||
|
|
2ea8742510 | ||
|
|
5cfa0254f9 | ||
|
|
8cd2544f78 | ||
|
|
c03b768364 | ||
|
|
d60481ead4 | ||
|
|
126be3827d | ||
|
|
121274dca2 | ||
|
|
0ecf8da27e | ||
|
|
4a6bcb525d | ||
|
|
83f9ee50dd | ||
|
|
2bff297f79 | ||
|
|
dee68f6933 | ||
|
|
afa1e19c83 | ||
|
|
6052088eb4 | ||
|
|
c7fa5167c4 | ||
|
|
1034b0b146 | ||
|
|
8bcc4e5945 | ||
|
|
c3c24aa1db | ||
|
|
281c75d2d2 | ||
|
|
52307420f3 | ||
|
|
6185347cd8 | ||
|
|
b6cd29f77e | ||
|
|
b8ea8b1567 | ||
|
|
2f7dc98830 | ||
|
|
e248a99f79 | ||
|
|
4fb6d9aa5d | ||
|
|
f092ea8d67 | ||
|
|
c32cbbdda6 | ||
|
|
2497675259 | ||
|
|
8d084ab90a | ||
|
|
2398773ef0 | ||
|
|
a05998a30e | ||
|
|
f863c29194 | ||
|
|
d16a98c788 | ||
|
|
9421b02e96 | ||
|
|
10256864e4 | ||
|
|
85d010615d | ||
|
|
cd1cb186be | ||
|
|
4458354d70 | ||
|
|
0f27da8808 | ||
|
|
dd76bfa3c2 | ||
|
|
5780a66f7d | ||
|
|
d4342c034c | ||
|
|
1ec43f2530 | ||
|
|
3c300d8fdf | ||
|
|
23119b55d1 | ||
|
|
c8fb0e8f8a | ||
|
|
0ec32a77ef | ||
|
|
52921bfce8 | ||
|
|
960b929097 | ||
|
|
d4ce23eced | ||
|
|
6925510f44 | ||
|
|
9827ad4c22 | ||
|
|
ef8aaee028 | ||
|
|
3d7d39f248 | ||
|
|
3eac620560 | ||
|
|
ab17006956 | ||
|
|
bfc6889ee9 | ||
|
|
0ec0b4a044 | ||
|
|
f1a523f327 | ||
|
|
4181449aea | ||
|
|
e192f8db52 | ||
|
|
8097c681ac | ||
|
|
f45938bdd5 | ||
|
|
6ea4e97eca | ||
|
|
f274c8e837 | ||
|
|
335e571485 | ||
|
|
a11616aace | ||
|
|
883acadbc4 | ||
|
|
f51e6a3fcf | ||
|
|
371e081c0d | ||
|
|
6f41b3bf1c | ||
|
|
c1d74a6c9e | ||
|
|
24eaa6796e | ||
|
|
1521e3b620 | ||
|
|
b6ff38dd62 | ||
|
|
44ea9ac03c | ||
|
|
4c2701505b | ||
|
|
9022fe18da | ||
|
|
63be349f8b | ||
|
|
c40256a290 | ||
|
|
33ecb8ec52 | ||
|
|
82d62a0015 | ||
|
|
6278240526 | ||
|
|
8c2dc5f57d | ||
|
|
2e5868778a | ||
|
|
a10b8dab9b | ||
|
|
92f4f7ef59 | ||
|
|
31257bd5cb | ||
|
|
bb6510862f | ||
|
|
797ecf0780 | ||
|
|
f9536dc67f | ||
|
|
e8b95362af | ||
|
|
bdc39ad4ec | ||
|
|
4a202c5585 | ||
|
|
3c6b321f73 | ||
|
|
cb29b52799 | ||
|
|
7e48015a54 | ||
|
|
9ed3abf932 | ||
|
|
61762828a3 | ||
|
|
59beabe5ac | ||
|
|
0b30faa28c | ||
|
|
d12d49b93f | ||
|
|
f1d64d275a | ||
|
|
d094eeeb03 | ||
|
|
be25af658e | ||
|
|
794f52c229 | ||
|
|
5d4dc4ed4c | ||
|
|
e49d97b898 | ||
|
|
b6b4f1ba62 | ||
|
|
653d476716 | ||
|
|
48b855258c | ||
|
|
c7efdaf5f9 | ||
|
|
22523ed3d3 | ||
|
|
33c602dd61 | ||
|
|
e2a5509b76 | ||
|
|
61a0fa1a89 | ||
|
|
a35bd8292b | ||
|
|
06c8ae60e3 | ||
|
|
deeab1f845 | ||
|
|
da81c4c987 | ||
|
|
d180f1b2d5 | ||
|
|
526135629c | ||
|
|
6b9493e057 | ||
|
|
9bb33d2afc | ||
|
|
7421138533 | ||
|
|
d0800c52bb | ||
|
|
913fcd4df2 | ||
|
|
83322cc725 | ||
|
|
5944501feb | ||
|
|
17e3603d3d | ||
|
|
95be43ae47 | ||
|
|
feb91cbbaa | ||
|
|
79409af168 | ||
|
|
5dbfb64822 | ||
|
|
5e7ebf5e69 | ||
|
|
e73215ca74 | ||
|
|
a5f123b9ce | ||
|
|
ac058e9675 | ||
|
|
371b764d1d | ||
|
|
66d7172e09 | ||
|
|
99d3a8a749 | ||
|
|
db5ff372a4 | ||
|
|
3fe83f81be | ||
|
|
669e638fd6 | ||
|
|
f1f999f3b6 | ||
|
|
6f3b6fa9ce | ||
|
|
938f945301 | ||
|
|
e3efb2aad6 | ||
|
|
1e678c0d78 | ||
|
|
a59c111140 | ||
|
|
a8b2a31bed | ||
|
|
37402f9ee8 | ||
|
|
e7b5ecb40f | ||
|
|
c817ef04b9 | ||
|
|
f52b18439c | ||
|
|
1e03c628d5 | ||
|
|
71fb39db1f | ||
|
|
bcfb3726b0 | ||
|
|
c6e9e29671 | ||
|
|
1bfefcce39 | ||
|
|
22488e93e1 | ||
|
|
244b89f035 | ||
|
|
1f9a241b94 | ||
|
|
03641aae42 | ||
|
|
a2bdd113cc | ||
|
|
a92e2f3c7b | ||
|
|
97766b3a57 | ||
|
|
9ef4c3bb06 | ||
|
|
d82f0cd757 | ||
|
|
5f529e2af4 | ||
|
|
beadd9e02b | ||
|
|
72543789cb | ||
|
|
5789439fa9 | ||
|
|
f549126bcf | ||
|
|
7197548bad | ||
|
|
241fde783c | ||
|
|
2b872cd1f4 | ||
|
|
a606fb4d1d | ||
|
|
9f9c6be38e | ||
|
|
01ee524049 | ||
|
|
af9cb65338 | ||
|
|
8aa11c580b | ||
|
|
ada627f444 | ||
|
|
a7b6d338c3 | ||
|
|
9f00538b97 | ||
|
|
a085015282 | ||
|
|
0b9c220fbb | ||
|
|
0e3d04873d | ||
|
|
b7578d939f | ||
|
|
b5c28de03f | ||
|
|
e17d25c156 | ||
|
|
c25dc1b99c | ||
|
|
a493a574bd | ||
|
|
4284493dce | ||
|
|
25059de8e1 | ||
|
|
1731b05ad0 | ||
|
|
e80dc663ac | ||
|
|
39988a4c2f | ||
|
|
415bff303a | ||
|
|
a65eb62a54 | ||
|
|
03b2982128 | ||
|
|
bff0527857 | ||
|
|
f3b7634254 | ||
|
|
6a9593c0b9 | ||
|
|
edb785b8e5 | ||
|
|
26d757b50a | ||
|
|
535079ee87 | ||
|
|
ac380c29c1 | ||
|
|
3fd212f26c | ||
|
|
04a3abc651 | ||
|
|
6caf85ddd1 | ||
|
|
16e4071508 | ||
|
|
69e7c4324b | ||
|
|
a1c4a8cbe5 | ||
|
|
e37f6cfda7 | ||
|
|
989c804409 | ||
|
|
7345bc3c82 | ||
|
|
69bee35700 | ||
|
|
598e24df7c | ||
|
|
0ae669201e | ||
|
|
f52a8a4642 | ||
|
|
9c40b61ef2 | ||
|
|
72dabcda83 | ||
|
|
161a06dbcc | ||
|
|
8ed3d4e70c | ||
|
|
a4223ccc8a | ||
|
|
ca85923855 | ||
|
|
52bfe7c493 | ||
|
|
4786bd0cbe | ||
|
|
cadab160ff | ||
|
|
6a7f17b2b0 | ||
|
|
4986a4d775 | ||
|
|
903af0c2cf | ||
|
|
3282fa803c | ||
|
|
67cc47608d | ||
|
|
0411704b8b | ||
|
|
1de85b2c69 | ||
|
|
33b012f29d | ||
|
|
1357584df3 | ||
|
|
e15809e271 | ||
|
|
0da1950427 | ||
|
|
e590b921be | ||
|
|
09462692f5 | ||
|
|
c1d1b5f762 | ||
|
|
6b9c87b858 | ||
|
|
485b6eb904 | ||
|
|
057630bdb5 | ||
|
|
6b02873b30 | ||
|
|
0fa0fc6d6b | ||
|
|
339ec07465 | ||
|
|
cd2e798fea | ||
|
|
d5cadbeae2 | ||
|
|
8046a3ccae | ||
|
|
bf91d60b31 | ||
|
|
539c047ec8 | ||
|
|
290c18fa87 | ||
|
|
98c46f5e57 | ||
|
|
f8bd5b5b4e | ||
|
|
816d32edad | ||
|
|
8453835c05 | ||
|
|
9328c356c8 | ||
|
|
89e3c1fc94 | ||
|
|
67e54cd15d | ||
|
|
278ea24786 | ||
|
|
aba1662631 | ||
|
|
61eeb60c19 | ||
|
|
5e9a8f4806 | ||
|
|
4cb274e9bc | ||
|
|
8b9b1a6a35 | ||
|
|
2655964113 | ||
|
|
188bad061b | ||
|
|
3af4c329aa | ||
|
|
6c13395f7d | ||
|
|
77b32ba360 | ||
|
|
91dba291ac | ||
|
|
a6bc293640 | ||
|
|
53882d6e5f | ||
|
|
d68adfbf10 | ||
|
|
498a392d7f | ||
|
|
740f6c05db | ||
|
|
d810ce301f | ||
|
|
5ef6a14d24 | ||
|
|
a13f6f1e68 | ||
|
|
d2d0f1aaee | ||
|
|
e64c72cc89 |
@@ -23,5 +23,6 @@ POSTGRES_USER=postgres
|
|||||||
POSTGRES_PASS=postgrespass
|
POSTGRES_PASS=postgrespass
|
||||||
|
|
||||||
# DEV SETTINGS
|
# DEV SETTINGS
|
||||||
APP_PORT=8080
|
APP_PORT=80
|
||||||
API_PORT=8000
|
API_PORT=80
|
||||||
|
HTTP_PROTOCOL=https
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM python:3.8-slim
|
FROM python:3.9.2-slim
|
||||||
|
|
||||||
ENV TACTICAL_DIR /opt/tactical
|
ENV TACTICAL_DIR /opt/tactical
|
||||||
ENV TACTICAL_GO_DIR /usr/local/rmmgo
|
ENV TACTICAL_GO_DIR /usr/local/rmmgo
|
||||||
@@ -15,7 +15,7 @@ RUN groupadd -g 1000 tactical && \
|
|||||||
useradd -u 1000 -g 1000 tactical
|
useradd -u 1000 -g 1000 tactical
|
||||||
|
|
||||||
# Copy Go Files
|
# Copy Go Files
|
||||||
COPY --from=golang:1.15 /usr/local/go ${TACTICAL_GO_DIR}/go
|
COPY --from=golang:1.16 /usr/local/go ${TACTICAL_GO_DIR}/go
|
||||||
|
|
||||||
# Copy Dev python reqs
|
# Copy Dev python reqs
|
||||||
COPY ./requirements.txt /
|
COPY ./requirements.txt /
|
||||||
|
|||||||
@@ -3,12 +3,15 @@ version: '3.4'
|
|||||||
services:
|
services:
|
||||||
api-dev:
|
api-dev:
|
||||||
image: api-dev
|
image: api-dev
|
||||||
|
restart: always
|
||||||
build:
|
build:
|
||||||
context: .
|
context: .
|
||||||
dockerfile: ./api.dockerfile
|
dockerfile: ./api.dockerfile
|
||||||
command: ["tactical-api"]
|
command: ["tactical-api"]
|
||||||
|
environment:
|
||||||
|
API_PORT: ${API_PORT}
|
||||||
ports:
|
ports:
|
||||||
- 8000:8000
|
- "8000:${API_PORT}"
|
||||||
volumes:
|
volumes:
|
||||||
- tactical-data-dev:/opt/tactical
|
- tactical-data-dev:/opt/tactical
|
||||||
- ..:/workspace:cached
|
- ..:/workspace:cached
|
||||||
@@ -19,40 +22,31 @@ services:
|
|||||||
|
|
||||||
app-dev:
|
app-dev:
|
||||||
image: node:12-alpine
|
image: node:12-alpine
|
||||||
ports:
|
restart: always
|
||||||
- 8080:8080
|
command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
|
||||||
command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port 8080"
|
|
||||||
working_dir: /workspace/web
|
working_dir: /workspace/web
|
||||||
volumes:
|
volumes:
|
||||||
- ..:/workspace:cached
|
- ..:/workspace:cached
|
||||||
|
ports:
|
||||||
|
- "8080:${APP_PORT}"
|
||||||
networks:
|
networks:
|
||||||
dev:
|
dev:
|
||||||
aliases:
|
aliases:
|
||||||
- tactical-frontend
|
- tactical-frontend
|
||||||
|
|
||||||
# salt master and api
|
|
||||||
salt-dev:
|
|
||||||
image: ${IMAGE_REPO}tactical-salt:${VERSION}
|
|
||||||
restart: always
|
|
||||||
volumes:
|
|
||||||
- tactical-data-dev:/opt/tactical
|
|
||||||
- salt-data-dev:/etc/salt
|
|
||||||
ports:
|
|
||||||
- "4505:4505"
|
|
||||||
- "4506:4506"
|
|
||||||
networks:
|
|
||||||
dev:
|
|
||||||
aliases:
|
|
||||||
- tactical-salt
|
|
||||||
|
|
||||||
# nats
|
# nats
|
||||||
nats-dev:
|
nats-dev:
|
||||||
image: ${IMAGE_REPO}tactical-nats:${VERSION}
|
image: ${IMAGE_REPO}tactical-nats:${VERSION}
|
||||||
restart: always
|
restart: always
|
||||||
|
environment:
|
||||||
|
API_HOST: ${API_HOST}
|
||||||
|
API_PORT: ${API_PORT}
|
||||||
|
DEV: 1
|
||||||
ports:
|
ports:
|
||||||
- "4222:4222"
|
- "4222:4222"
|
||||||
volumes:
|
volumes:
|
||||||
- tactical-data-dev:/opt/tactical
|
- tactical-data-dev:/opt/tactical
|
||||||
|
- ..:/workspace:cached
|
||||||
networks:
|
networks:
|
||||||
dev:
|
dev:
|
||||||
aliases:
|
aliases:
|
||||||
@@ -136,6 +130,8 @@ services:
|
|||||||
MESH_USER: ${MESH_USER}
|
MESH_USER: ${MESH_USER}
|
||||||
TRMM_USER: ${TRMM_USER}
|
TRMM_USER: ${TRMM_USER}
|
||||||
TRMM_PASS: ${TRMM_PASS}
|
TRMM_PASS: ${TRMM_PASS}
|
||||||
|
HTTP_PROTOCOL: ${HTTP_PROTOCOL}
|
||||||
|
APP_PORT: ${APP_PORT}
|
||||||
depends_on:
|
depends_on:
|
||||||
- postgres-dev
|
- postgres-dev
|
||||||
- meshcentral-dev
|
- meshcentral-dev
|
||||||
@@ -179,23 +175,6 @@ services:
|
|||||||
- postgres-dev
|
- postgres-dev
|
||||||
- redis-dev
|
- redis-dev
|
||||||
|
|
||||||
# container for celery winupdate tasks
|
|
||||||
celerywinupdate-dev:
|
|
||||||
image: api-dev
|
|
||||||
build:
|
|
||||||
context: .
|
|
||||||
dockerfile: ./api.dockerfile
|
|
||||||
command: ["tactical-celerywinupdate-dev"]
|
|
||||||
restart: always
|
|
||||||
networks:
|
|
||||||
- dev
|
|
||||||
volumes:
|
|
||||||
- tactical-data-dev:/opt/tactical
|
|
||||||
- ..:/workspace:cached
|
|
||||||
depends_on:
|
|
||||||
- postgres-dev
|
|
||||||
- redis-dev
|
|
||||||
|
|
||||||
nginx-dev:
|
nginx-dev:
|
||||||
# container for tactical reverse proxy
|
# container for tactical reverse proxy
|
||||||
image: ${IMAGE_REPO}tactical-nginx:${VERSION}
|
image: ${IMAGE_REPO}tactical-nginx:${VERSION}
|
||||||
@@ -206,8 +185,8 @@ services:
|
|||||||
MESH_HOST: ${MESH_HOST}
|
MESH_HOST: ${MESH_HOST}
|
||||||
CERT_PUB_KEY: ${CERT_PUB_KEY}
|
CERT_PUB_KEY: ${CERT_PUB_KEY}
|
||||||
CERT_PRIV_KEY: ${CERT_PRIV_KEY}
|
CERT_PRIV_KEY: ${CERT_PRIV_KEY}
|
||||||
APP_PORT: 8080
|
APP_PORT: ${APP_PORT}
|
||||||
API_PORT: 8000
|
API_PORT: ${API_PORT}
|
||||||
networks:
|
networks:
|
||||||
dev:
|
dev:
|
||||||
ipv4_address: 172.21.0.20
|
ipv4_address: 172.21.0.20
|
||||||
@@ -222,7 +201,6 @@ volumes:
|
|||||||
postgres-data-dev:
|
postgres-data-dev:
|
||||||
mongo-dev-data:
|
mongo-dev-data:
|
||||||
mesh-data-dev:
|
mesh-data-dev:
|
||||||
salt-data-dev:
|
|
||||||
|
|
||||||
networks:
|
networks:
|
||||||
dev:
|
dev:
|
||||||
|
|||||||
@@ -9,8 +9,6 @@ set -e
|
|||||||
: "${POSTGRES_USER:=tactical}"
|
: "${POSTGRES_USER:=tactical}"
|
||||||
: "${POSTGRES_PASS:=tactical}"
|
: "${POSTGRES_PASS:=tactical}"
|
||||||
: "${POSTGRES_DB:=tacticalrmm}"
|
: "${POSTGRES_DB:=tacticalrmm}"
|
||||||
: "${SALT_HOST:=tactical-salt}"
|
|
||||||
: "${SALT_USER:=saltapi}"
|
|
||||||
: "${MESH_CONTAINER:=tactical-meshcentral}"
|
: "${MESH_CONTAINER:=tactical-meshcentral}"
|
||||||
: "${MESH_USER:=meshcentral}"
|
: "${MESH_USER:=meshcentral}"
|
||||||
: "${MESH_PASS:=meshcentralpass}"
|
: "${MESH_PASS:=meshcentralpass}"
|
||||||
@@ -18,6 +16,9 @@ set -e
|
|||||||
: "${API_HOST:=tactical-backend}"
|
: "${API_HOST:=tactical-backend}"
|
||||||
: "${APP_HOST:=tactical-frontend}"
|
: "${APP_HOST:=tactical-frontend}"
|
||||||
: "${REDIS_HOST:=tactical-redis}"
|
: "${REDIS_HOST:=tactical-redis}"
|
||||||
|
: "${HTTP_PROTOCOL:=http}"
|
||||||
|
: "${APP_PORT:=8080}"
|
||||||
|
: "${API_PORT:=8000}"
|
||||||
|
|
||||||
# Add python venv to path
|
# Add python venv to path
|
||||||
export PATH="${VIRTUAL_ENV}/bin:$PATH"
|
export PATH="${VIRTUAL_ENV}/bin:$PATH"
|
||||||
@@ -44,17 +45,9 @@ function django_setup {
|
|||||||
echo "setting up django environment"
|
echo "setting up django environment"
|
||||||
|
|
||||||
# configure django settings
|
# configure django settings
|
||||||
MESH_TOKEN=$(cat ${TACTICAL_DIR}/tmp/mesh_token)
|
MESH_TOKEN="$(cat ${TACTICAL_DIR}/tmp/mesh_token)"
|
||||||
|
|
||||||
DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
|
DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
|
||||||
|
|
||||||
# write salt pass to tmp dir
|
|
||||||
if [ ! -f "${TACTICAL__DIR}/tmp/salt_pass" ]; then
|
|
||||||
SALT_PASS=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 20 | head -n 1)
|
|
||||||
echo "${SALT_PASS}" > ${TACTICAL_DIR}/tmp/salt_pass
|
|
||||||
else
|
|
||||||
SALT_PASS=$(cat ${TACTICAL_DIR}/tmp/salt_pass)
|
|
||||||
fi
|
|
||||||
|
|
||||||
localvars="$(cat << EOF
|
localvars="$(cat << EOF
|
||||||
SECRET_KEY = '${DJANGO_SEKRET}'
|
SECRET_KEY = '${DJANGO_SEKRET}'
|
||||||
@@ -68,7 +61,7 @@ KEY_FILE = '/opt/tactical/certs/privkey.pem'
|
|||||||
|
|
||||||
SCRIPTS_DIR = '${WORKSPACE_DIR}/scripts'
|
SCRIPTS_DIR = '${WORKSPACE_DIR}/scripts'
|
||||||
|
|
||||||
ALLOWED_HOSTS = ['${API_HOST}', 'localhost', '127.0.0.1']
|
ALLOWED_HOSTS = ['${API_HOST}', '*']
|
||||||
|
|
||||||
ADMIN_URL = 'admin/'
|
ADMIN_URL = 'admin/'
|
||||||
|
|
||||||
@@ -103,51 +96,47 @@ if not DEBUG:
|
|||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
SALT_USERNAME = '${SALT_USER}'
|
|
||||||
SALT_PASSWORD = '${SALT_PASS}'
|
|
||||||
SALT_HOST = '${SALT_HOST}'
|
|
||||||
MESH_USERNAME = '${MESH_USER}'
|
MESH_USERNAME = '${MESH_USER}'
|
||||||
MESH_SITE = 'https://${MESH_HOST}'
|
MESH_SITE = 'https://${MESH_HOST}'
|
||||||
MESH_TOKEN_KEY = '${MESH_TOKEN}'
|
MESH_TOKEN_KEY = '${MESH_TOKEN}'
|
||||||
REDIS_HOST = '${REDIS_HOST}'
|
REDIS_HOST = '${REDIS_HOST}'
|
||||||
|
ADMIN_ENABLED = True
|
||||||
EOF
|
EOF
|
||||||
)"
|
)"
|
||||||
|
|
||||||
echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
|
echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
|
||||||
|
|
||||||
# run migrations and init scripts
|
# run migrations and init scripts
|
||||||
python manage.py migrate --no-input
|
"${VIRTUAL_ENV}"/bin/python manage.py migrate --no-input
|
||||||
python manage.py collectstatic --no-input
|
"${VIRTUAL_ENV}"/bin/python manage.py collectstatic --no-input
|
||||||
python manage.py initial_db_setup
|
"${VIRTUAL_ENV}"/bin/python manage.py initial_db_setup
|
||||||
python manage.py initial_mesh_setup
|
"${VIRTUAL_ENV}"/bin/python manage.py initial_mesh_setup
|
||||||
python manage.py load_chocos
|
"${VIRTUAL_ENV}"/bin/python manage.py load_chocos
|
||||||
python manage.py load_community_scripts
|
"${VIRTUAL_ENV}"/bin/python manage.py load_community_scripts
|
||||||
python manage.py reload_nats
|
"${VIRTUAL_ENV}"/bin/python manage.py reload_nats
|
||||||
|
|
||||||
# create super user
|
# create super user
|
||||||
echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
|
echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if [ "$1" = 'tactical-init-dev' ]; then
|
if [ "$1" = 'tactical-init-dev' ]; then
|
||||||
|
|
||||||
# make directories if they don't exist
|
# make directories if they don't exist
|
||||||
mkdir -p ${TACTICAL_DIR}/tmp
|
mkdir -p "${TACTICAL_DIR}/tmp"
|
||||||
|
|
||||||
test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"
|
test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"
|
||||||
|
|
||||||
# setup Python virtual env and install dependencies
|
# setup Python virtual env and install dependencies
|
||||||
test -f ${VIRTUAL_ENV} && python -m venv --copies ${VIRTUAL_ENV}
|
! test -e "${VIRTUAL_ENV}" && python -m venv ${VIRTUAL_ENV}
|
||||||
pip install --no-cache-dir -r /requirements.txt
|
"${VIRTUAL_ENV}"/bin/pip install --no-cache-dir -r /requirements.txt
|
||||||
|
|
||||||
django_setup
|
django_setup
|
||||||
|
|
||||||
# create .env file for frontend
|
# create .env file for frontend
|
||||||
webenv="$(cat << EOF
|
webenv="$(cat << EOF
|
||||||
PROD_URL = "http://${API_HOST}:8000"
|
PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}"
|
||||||
DEV_URL = "http://${API_HOST}:8000"
|
DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}"
|
||||||
DEV_HOST = 0.0.0.0
|
APP_URL = https://${APP_HOST}
|
||||||
DEV_PORT = 8080
|
|
||||||
EOF
|
EOF
|
||||||
)"
|
)"
|
||||||
echo "${webenv}" | tee ${WORKSPACE_DIR}/web/.env > /dev/null
|
echo "${webenv}" | tee ${WORKSPACE_DIR}/web/.env > /dev/null
|
||||||
@@ -161,22 +150,20 @@ EOF
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
if [ "$1" = 'tactical-api' ]; then
|
if [ "$1" = 'tactical-api' ]; then
|
||||||
|
cp "${WORKSPACE_DIR}"/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
|
||||||
|
chmod +x /usr/local/bin/goversioninfo
|
||||||
|
|
||||||
check_tactical_ready
|
check_tactical_ready
|
||||||
python manage.py runserver 0.0.0.0:8000
|
"${VIRTUAL_ENV}"/bin/python manage.py runserver 0.0.0.0:"${API_PORT}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ "$1" = 'tactical-celery-dev' ]; then
|
if [ "$1" = 'tactical-celery-dev' ]; then
|
||||||
check_tactical_ready
|
check_tactical_ready
|
||||||
celery -A tacticalrmm worker -l debug
|
"${VIRTUAL_ENV}"/bin/celery -A tacticalrmm worker -l debug
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ "$1" = 'tactical-celerybeat-dev' ]; then
|
if [ "$1" = 'tactical-celerybeat-dev' ]; then
|
||||||
check_tactical_ready
|
check_tactical_ready
|
||||||
test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
|
test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
|
||||||
celery -A tacticalrmm beat -l debug
|
"${VIRTUAL_ENV}"/bin/celery -A tacticalrmm beat -l debug
|
||||||
fi
|
|
||||||
|
|
||||||
if [ "$1" = 'tactical-celerywinupdate-dev' ]; then
|
|
||||||
check_tactical_ready
|
|
||||||
celery -A tacticalrmm worker -Q wupdate -l debug
|
|
||||||
fi
|
fi
|
||||||
|
|||||||
@@ -1,44 +1,32 @@
|
|||||||
# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
|
# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
|
||||||
amqp==2.6.1
|
asyncio-nats-client
|
||||||
asgiref==3.3.1
|
celery
|
||||||
asyncio-nats-client==0.11.4
|
Django
|
||||||
billiard==3.6.3.0
|
django-cors-headers
|
||||||
celery==4.4.6
|
django-rest-knox
|
||||||
certifi==2020.12.5
|
djangorestframework
|
||||||
cffi==1.14.3
|
loguru
|
||||||
chardet==3.0.4
|
msgpack
|
||||||
cryptography==3.2.1
|
psycopg2-binary
|
||||||
decorator==4.4.2
|
pycparser
|
||||||
Django==3.1.4
|
pycryptodome
|
||||||
django-cors-headers==3.5.0
|
pyotp
|
||||||
django-rest-knox==4.1.0
|
pyparsing
|
||||||
djangorestframework==3.12.2
|
pytz
|
||||||
future==0.18.2
|
qrcode
|
||||||
idna==2.10
|
redis
|
||||||
kombu==4.6.11
|
twilio
|
||||||
loguru==0.5.3
|
packaging
|
||||||
msgpack==1.0.0
|
validators
|
||||||
packaging==20.4
|
websockets
|
||||||
psycopg2-binary==2.8.6
|
|
||||||
pycparser==2.20
|
|
||||||
pycryptodome==3.9.9
|
|
||||||
pyotp==2.4.1
|
|
||||||
pyparsing==2.4.7
|
|
||||||
pytz==2020.4
|
|
||||||
qrcode==6.1
|
|
||||||
redis==3.5.3
|
|
||||||
requests==2.25.0
|
|
||||||
six==1.15.0
|
|
||||||
sqlparse==0.4.1
|
|
||||||
twilio==6.49.0
|
|
||||||
urllib3==1.26.2
|
|
||||||
validators==0.18.1
|
|
||||||
vine==1.3.0
|
|
||||||
websockets==8.1
|
|
||||||
zipp==3.4.0
|
|
||||||
black
|
black
|
||||||
Werkzeug
|
Werkzeug
|
||||||
django-extensions
|
django-extensions
|
||||||
coverage
|
coverage
|
||||||
coveralls
|
coveralls
|
||||||
model_bakery
|
model_bakery
|
||||||
|
mkdocs
|
||||||
|
mkdocs-material
|
||||||
|
pymdown-extensions
|
||||||
|
Pygments
|
||||||
|
mypy
|
||||||
|
|||||||
2
.github/FUNDING.yml
vendored
2
.github/FUNDING.yml
vendored
@@ -3,7 +3,7 @@
|
|||||||
github: wh1te909
|
github: wh1te909
|
||||||
patreon: # Replace with a single Patreon username
|
patreon: # Replace with a single Patreon username
|
||||||
open_collective: # Replace with a single Open Collective username
|
open_collective: # Replace with a single Open Collective username
|
||||||
ko_fi: # Replace with a single Ko-fi username
|
ko_fi: tacticalrmm
|
||||||
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||||
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||||
liberapay: # Replace with a single Liberapay username
|
liberapay: # Replace with a single Liberapay username
|
||||||
|
|||||||
22
.github/workflows/deploy-docs.yml
vendored
Normal file
22
.github/workflows/deploy-docs.yml
vendored
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
name: Deploy Docs
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- develop
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
working-directory: docs
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
deploy:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions/setup-python@v2
|
||||||
|
with:
|
||||||
|
python-version: 3.x
|
||||||
|
- run: pip install --upgrade pip
|
||||||
|
- run: pip install --upgrade setuptools wheel
|
||||||
|
- run: pip install mkdocs mkdocs-material pymdown-extensions
|
||||||
|
- run: mkdocs gh-deploy --force
|
||||||
10
.github/workflows/docker-build-push.yml
vendored
10
.github/workflows/docker-build-push.yml
vendored
@@ -57,16 +57,6 @@ jobs:
|
|||||||
platforms: linux/amd64
|
platforms: linux/amd64
|
||||||
tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest
|
tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest
|
||||||
|
|
||||||
- name: Build and Push Tactical Salt Image
|
|
||||||
uses: docker/build-push-action@v2
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
push: true
|
|
||||||
pull: true
|
|
||||||
file: ./docker/containers/tactical-salt/dockerfile
|
|
||||||
platforms: linux/amd64
|
|
||||||
tags: tacticalrmm/tactical-salt:${{ steps.prep.outputs.version }},tacticalrmm/tactical-salt:latest
|
|
||||||
|
|
||||||
- name: Build and Push Tactical Frontend Image
|
- name: Build and Push Tactical Frontend Image
|
||||||
uses: docker/build-push-action@v2
|
uses: docker/build-push-action@v2
|
||||||
with:
|
with:
|
||||||
|
|||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -45,3 +45,5 @@ htmlcov/
|
|||||||
docker-compose.dev.yml
|
docker-compose.dev.yml
|
||||||
docs/.vuepress/dist
|
docs/.vuepress/dist
|
||||||
nats-rmm.conf
|
nats-rmm.conf
|
||||||
|
.mypy_cache
|
||||||
|
docs/site/
|
||||||
|
|||||||
7
.vscode/settings.json
vendored
7
.vscode/settings.json
vendored
@@ -3,7 +3,14 @@
|
|||||||
"python.languageServer": "Pylance",
|
"python.languageServer": "Pylance",
|
||||||
"python.analysis.extraPaths": [
|
"python.analysis.extraPaths": [
|
||||||
"api/tacticalrmm",
|
"api/tacticalrmm",
|
||||||
|
"api/env",
|
||||||
],
|
],
|
||||||
|
"python.analysis.diagnosticSeverityOverrides": {
|
||||||
|
"reportUnusedImport": "error",
|
||||||
|
"reportDuplicateImport": "error",
|
||||||
|
},
|
||||||
|
"python.analysis.memory.keepLibraryAst": true,
|
||||||
|
"python.linting.mypyEnabled": true,
|
||||||
"python.analysis.typeCheckingMode": "basic",
|
"python.analysis.typeCheckingMode": "basic",
|
||||||
"python.formatting.provider": "black",
|
"python.formatting.provider": "black",
|
||||||
"editor.formatOnSave": true,
|
"editor.formatOnSave": true,
|
||||||
|
|||||||
101
README.md
101
README.md
@@ -6,7 +6,7 @@
|
|||||||
[](https://github.com/python/black)
|
[](https://github.com/python/black)
|
||||||
|
|
||||||
Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
|
Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
|
||||||
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang, as well as the [SaltStack](https://github.com/saltstack/salt) api and [MeshCentral](https://github.com/Ylianst/MeshCentral)
|
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
|
||||||
|
|
||||||
# [LIVE DEMO](https://rmm.xlawgaming.com/)
|
# [LIVE DEMO](https://rmm.xlawgaming.com/)
|
||||||
Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
|
Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
|
||||||
@@ -15,6 +15,8 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
|
|||||||
|
|
||||||
### [Discord Chat](https://discord.gg/upGTkWp)
|
### [Discord Chat](https://discord.gg/upGTkWp)
|
||||||
|
|
||||||
|
### [Documentation](https://wh1te909.github.io/tacticalrmm/)
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
- Teamviewer-like remote desktop control
|
- Teamviewer-like remote desktop control
|
||||||
@@ -33,99 +35,6 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
|
|||||||
|
|
||||||
- Windows 7, 8.1, 10, Server 2008R2, 2012R2, 2016, 2019
|
- Windows 7, 8.1, 10, Server 2008R2, 2012R2, 2016, 2019
|
||||||
|
|
||||||
## Installation
|
## Installation / Backup / Restore / Usage
|
||||||
|
|
||||||
### Requirements
|
### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)
|
||||||
- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04 / Debian 10)
|
|
||||||
- A domain you own with at least 3 subdomains
|
|
||||||
- Google Authenticator app (2 factor is NOT optional)
|
|
||||||
|
|
||||||
### Docker
|
|
||||||
Refer to the [docker setup](docker/readme.md)
|
|
||||||
|
|
||||||
|
|
||||||
### Installation example (Ubuntu server 20.04 LTS)
|
|
||||||
|
|
||||||
Fresh VPS with latest updates\
|
|
||||||
login as root and create a user and add to sudoers group (we will be creating a user called tactical)
|
|
||||||
```
|
|
||||||
apt update && apt -y upgrade
|
|
||||||
adduser tactical
|
|
||||||
usermod -a -G sudo tactical
|
|
||||||
```
|
|
||||||
|
|
||||||
switch to the tactical user and setup the firewall
|
|
||||||
```
|
|
||||||
su - tactical
|
|
||||||
sudo ufw default deny incoming
|
|
||||||
sudo ufw default allow outgoing
|
|
||||||
sudo ufw allow ssh
|
|
||||||
sudo ufw allow http
|
|
||||||
sudo ufw allow https
|
|
||||||
sudo ufw allow proto tcp from any to any port 4505,4506
|
|
||||||
sudo ufw allow proto tcp from any to any port 4222
|
|
||||||
sudo ufw enable && sudo ufw reload
|
|
||||||
```
|
|
||||||
|
|
||||||
Our domain for this example is tacticalrmm.com
|
|
||||||
|
|
||||||
In the DNS manager of wherever our domain is hosted, we will create three A records, all pointing to the public IP address of our VPS
|
|
||||||
|
|
||||||
Create A record ```api.tacticalrmm.com``` for the django rest backend\
|
|
||||||
Create A record ```rmm.tacticalrmm.com``` for the vue frontend\
|
|
||||||
Create A record ```mesh.tacticalrmm.com``` for meshcentral
|
|
||||||
|
|
||||||
Download the install script and run it
|
|
||||||
|
|
||||||
```
|
|
||||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/install.sh
|
|
||||||
chmod +x install.sh
|
|
||||||
./install.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
Links will be provided at the end of the install script.\
|
|
||||||
Download the executable from the first link, then open ```rmm.tacticalrmm.com``` and login.\
|
|
||||||
Upload the executable when prompted during the initial setup page.
|
|
||||||
|
|
||||||
|
|
||||||
### Install an agent
|
|
||||||
From the app's dashboard, choose Agents > Install Agent to generate an installer.
|
|
||||||
|
|
||||||
## Updating
|
|
||||||
Download and run [update.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh)
|
|
||||||
```
|
|
||||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh
|
|
||||||
chmod +x update.sh
|
|
||||||
./update.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
## Backup
|
|
||||||
Download [backup.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh)
|
|
||||||
```
|
|
||||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh
|
|
||||||
```
|
|
||||||
Change the postgres username and password at the top of the file (you can find them in `/rmm/api/tacticalrmm/tacticalrmm/local_settings.py` under the DATABASES section)
|
|
||||||
|
|
||||||
Run it
|
|
||||||
```
|
|
||||||
chmod +x backup.sh
|
|
||||||
./backup.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
## Restore
|
|
||||||
Change your 3 A records to point to new server's public IP
|
|
||||||
|
|
||||||
Create same linux user account as old server and add to sudoers group and setup firewall (see install instructions above)
|
|
||||||
|
|
||||||
Copy backup file to new server
|
|
||||||
|
|
||||||
Download the restore script, and edit the postgres username/password at the top of the file. Same instructions as above in the backup steps.
|
|
||||||
```
|
|
||||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/restore.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
Run the restore script, passing it the backup tar file as the first argument
|
|
||||||
```
|
|
||||||
chmod +x restore.sh
|
|
||||||
./restore.sh rmm-backup-xxxxxxx.tar
|
|
||||||
```
|
|
||||||
@@ -1,457 +0,0 @@
|
|||||||
from __future__ import absolute_import
|
|
||||||
import psutil
|
|
||||||
import os
|
|
||||||
import datetime
|
|
||||||
import zlib
|
|
||||||
import json
|
|
||||||
import base64
|
|
||||||
import wmi
|
|
||||||
import win32evtlog
|
|
||||||
import win32con
|
|
||||||
import win32evtlogutil
|
|
||||||
import winerror
|
|
||||||
from time import sleep
|
|
||||||
import requests
|
|
||||||
import subprocess
|
|
||||||
import random
|
|
||||||
import platform
|
|
||||||
|
|
||||||
ARCH = "64" if platform.machine().endswith("64") else "32"
|
|
||||||
PROGRAM_DIR = os.path.join(os.environ["ProgramFiles"], "TacticalAgent")
|
|
||||||
TAC_RMM = os.path.join(PROGRAM_DIR, "tacticalrmm.exe")
|
|
||||||
NSSM = os.path.join(PROGRAM_DIR, "nssm.exe" if ARCH == "64" else "nssm-x86.exe")
|
|
||||||
TEMP_DIR = os.path.join(os.environ["WINDIR"], "Temp")
|
|
||||||
SYS_DRIVE = os.environ["SystemDrive"]
|
|
||||||
PY_BIN = os.path.join(SYS_DRIVE, "\\salt", "bin", "python.exe")
|
|
||||||
SALT_CALL = os.path.join(SYS_DRIVE, "\\salt", "salt-call.bat")
|
|
||||||
|
|
||||||
|
|
||||||
def get_services():
    """Enumerate windows services as a list of plain dicts.

    Manually re-implements psutil's svc.as_dict() — see
    https://github.com/wh1te909/tacticalrmm/issues/38 for why.
    Services that raise while being queried are silently skipped.
    """
    fields = (
        "display_name",
        "binpath",
        "username",
        "start_type",
        "status",
        "pid",
        "name",
        "description",
    )
    services = []
    for svc in psutil.win_service_iter():
        try:
            info = {field: getattr(svc, field)() for field in fields}
        except Exception:
            # service disappeared or access denied mid-query — skip it
            continue
        services.append(info)

    return services
|
|
||||||
|
|
||||||
|
|
||||||
def run_python_script(filename, timeout, script_type="userdefined"):
    """Fetch a python script from the salt fileserver and run it.

    No longer used as of agent version 0.11.0.

    Args:
        filename: script file name on the salt fileserver.
        timeout: max seconds the script may run.
        script_type: "userdefined" pulls from scripts/userdefined/,
            anything else pulls from scripts/.

    Returns:
        The dict produced by salt's cmd.run_all (stdout/stderr/retcode).
    """
    file_path = os.path.join(TEMP_DIR, filename)

    # remove a stale copy from a previous run; best effort only
    if os.path.exists(file_path):
        try:
            os.remove(file_path)
        except OSError:
            pass

    if script_type == "userdefined":
        __salt__["cp.get_file"](f"salt://scripts/userdefined/{filename}", file_path)
    else:
        __salt__["cp.get_file"](f"salt://scripts/{filename}", file_path)

    return __salt__["cmd.run_all"](f"{PY_BIN} {file_path}", timeout=timeout)
|
|
||||||
|
|
||||||
|
|
||||||
def run_script(filepath, filename, shell, timeout, args=None, bg=False):
    """Run a script through salt using the requested shell.

    Args:
        filepath: salt:// source path of the script.
        filename: destination file name (python scripts only).
        shell: "powershell", "cmd", or "python".
        timeout: max seconds the script may run.
        args: optional list of arguments passed to the script.
        bg: run in the background instead of waiting for output.

    Returns:
        The salt cmd.script / cmd.run_all / cmd.run_bg result, or None
        for an unrecognized shell.
    """
    # fixed: args previously defaulted to a mutable [] (shared across calls);
    # None behaves identically since only truthiness is checked
    # quote each arg so values containing spaces survive the shell
    quoted = " ".join(f'"{a}"' for a in args) if args else ""

    if shell == "powershell" or shell == "cmd":
        if quoted:
            return __salt__["cmd.script"](
                source=filepath,
                args=quoted,
                shell=shell,
                timeout=timeout,
                bg=bg,
            )
        else:
            return __salt__["cmd.script"](
                source=filepath, shell=shell, timeout=timeout, bg=bg
            )

    elif shell == "python":
        file_path = os.path.join(TEMP_DIR, filename)

        # remove a stale copy from a previous run; best effort only
        if os.path.exists(file_path):
            try:
                os.remove(file_path)
            except OSError:
                pass

        __salt__["cp.get_file"](filepath, file_path)

        salt_cmd = "cmd.run_bg" if bg else "cmd.run_all"

        if quoted:
            return __salt__[salt_cmd](f"{PY_BIN} {file_path} {quoted}", timeout=timeout)
        else:
            return __salt__[salt_cmd](f"{PY_BIN} {file_path}", timeout=timeout)
|
|
||||||
|
|
||||||
|
|
||||||
def uninstall_agent():
    """Launch the Inno Setup uninstaller in the background and return at once."""
    uninstaller = os.path.join(PROGRAM_DIR, "unins000.exe")
    __salt__["cmd.run_bg"]([uninstaller, "/VERYSILENT", "/SUPPRESSMSGBOXES"])
    return "ok"
|
|
||||||
|
|
||||||
|
|
||||||
def update_salt():
    """Spawn the agent binary in 'updatesalt' mode as a detached process.

    Returns "running" when an update is already in progress, otherwise
    the pid of the newly spawned process.
    """
    # bail out if another salt update is already underway
    for proc in psutil.process_iter():
        with proc.oneshot():
            if proc.name() == "tacticalrmm.exe" and "updatesalt" in proc.cmdline():
                return "running"

    from subprocess import Popen, PIPE

    # detach fully so the child survives this salt call exiting
    CREATE_NEW_PROCESS_GROUP = 0x00000200
    DETACHED_PROCESS = 0x00000008
    child = Popen(
        [TAC_RMM, "-m", "updatesalt"],
        stdin=PIPE,
        stdout=PIPE,
        stderr=PIPE,
        close_fds=True,
        creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
    )
    return child.pid
|
|
||||||
|
|
||||||
|
|
||||||
def run_manual_checks():
    """Kick off the agent's check runner in the background."""
    __salt__["cmd.run_bg"]([TAC_RMM, "-m", "runchecks"])
    return "ok"
|
|
||||||
|
|
||||||
|
|
||||||
def install_updates():
    """Launch the agent's windows-update runner unless one is already active."""
    for proc in psutil.process_iter():
        with proc.oneshot():
            if proc.name() == "tacticalrmm.exe" and "winupdater" in proc.cmdline():
                return "running"

    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "winupdater"])
|
|
||||||
|
|
||||||
|
|
||||||
def _wait_for_service(svc, status, retries=10):
    """Poll a windows service until it reaches *status* or retries run out.

    Sleeps 5s between polls. A missing service counts as a failed attempt.
    Returns the last observed status string, or "fail" if the service was
    never found on the final attempt.
    """
    attempts = 0
    while 1:
        try:
            service = psutil.win_service_get(svc)
        except psutil.NoSuchProcess:
            # service not installed (yet) — treat as a failed attempt
            stat = "fail"
            attempts += 1
            sleep(5)
        else:
            stat = service.status()
            if stat != status:
                attempts += 1
                sleep(5)
            else:
                # desired status reached — zeroing attempts triggers the
                # exit condition below
                attempts = 0

        # leave on first success, or once the retry budget is exhausted
        if attempts == 0 or attempts > retries:
            break

    return stat
|
|
||||||
|
|
||||||
|
|
||||||
def agent_update_v2(inno, url):
    """Download and silently run a new agent installer, then ensure services run.

    Args:
        inno: file name of the Inno Setup installer.
        url: download URL for the installer.

    Returns:
        "already running" if another update is in progress, "failed" on
        download errors, otherwise "ok".
    """
    # make sure another instance of the update is not running.
    # this function spawns 2 instances of itself (because we call it twice
    # with salt run_bg), so if more than 2 are running, don't continue —
    # an update is already running.
    count = 0
    for p in psutil.process_iter():
        try:
            with p.oneshot():
                if "win_agent.agent_update_v2" in p.cmdline():
                    count += 1
        except Exception:
            continue

    if count > 2:
        return "already running"

    sleep(random.randint(1, 20))  # stagger agents so we don't flood the rmm

    exe = os.path.join(TEMP_DIR, inno)

    # remove a stale installer from a previous attempt; best effort only
    # (fixed: was a bare except that would also swallow SystemExit)
    if os.path.exists(exe):
        try:
            os.remove(exe)
        except OSError:
            pass

    try:
        r = requests.get(url, stream=True, timeout=600)
    except Exception:
        return "failed"

    if r.status_code != 200:
        return "failed"

    with open(exe, "wb") as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    del r

    # fixed: return value was assigned to an unused variable
    subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=120)

    # the installer should restart the services; start them manually if not
    tac = _wait_for_service(svc="tacticalagent", status="running")
    if tac != "running":
        subprocess.run([NSSM, "start", "tacticalagent"], timeout=30)

    chk = _wait_for_service(svc="checkrunner", status="running")
    if chk != "running":
        subprocess.run([NSSM, "start", "checkrunner"], timeout=30)

    return "ok"
|
|
||||||
|
|
||||||
|
|
||||||
def do_agent_update_v2(inno, url):
    """Fire off the v2 agent update in the background via salt-call."""
    cmd = [
        SALT_CALL,
        "win_agent.agent_update_v2",
        f"inno={inno}",
        f"url={url}",
        "--local",
    ]
    return __salt__["cmd.run_bg"](cmd)
|
|
||||||
|
|
||||||
|
|
||||||
def agent_update(version, url):
    """Legacy updater: download winagent-v<version>.exe and install it.

    Stops the agent services, runs the installer silently, then restarts
    the services.

    Args:
        version: agent version string used to name the downloaded exe.
        url: download URL for the installer.

    Returns:
        "already running" if another update is in progress, "failed" on
        download errors, otherwise "ok".
    """
    # make sure another instance of the update is not running.
    # this function spawns 2 instances of itself so if more than 2 running,
    # don't continue as an update is already running
    count = 0
    for p in psutil.process_iter():
        try:
            with p.oneshot():
                if "win_agent.agent_update" in p.cmdline():
                    count += 1
        except Exception:
            continue

    if count > 2:
        return "already running"

    sleep(random.randint(1, 60))  # stagger agents so we don't flood the rmm
    try:
        r = requests.get(url, stream=True, timeout=600)
    except Exception:
        return "failed"

    if r.status_code != 200:
        return "failed"

    exe = os.path.join(TEMP_DIR, f"winagent-v{version}.exe")

    with open(exe, "wb") as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    del r

    services = ("tacticalagent", "checkrunner")

    for svc in services:
        subprocess.run([NSSM, "stop", svc], timeout=120)

    sleep(10)
    # fixed: the result was assigned to an unused variable that shadowed r
    subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=300)
    sleep(30)

    for svc in services:
        subprocess.run([NSSM, "start", svc], timeout=120)

    return "ok"
|
|
||||||
|
|
||||||
|
|
||||||
def do_agent_update(version, url):
    """Fire off the legacy agent update in the background via salt-call."""
    cmd = [
        SALT_CALL,
        "win_agent.agent_update",
        f"version={version}",
        f"url={url}",
        "--local",
    ]
    return __salt__["cmd.run_bg"](cmd)
|
|
||||||
|
|
||||||
|
|
||||||
class SystemDetail:
    """Snapshot of hardware/OS details queried from WMI at construction time."""

    def __init__(self):
        self.c = wmi.WMI()
        self.comp_sys_prod = self.c.Win32_ComputerSystemProduct()
        self.comp_sys = self.c.Win32_ComputerSystem()
        self.memory = self.c.Win32_PhysicalMemory()
        self.os = self.c.Win32_OperatingSystem()
        self.base_board = self.c.Win32_BaseBoard()
        self.bios = self.c.Win32_BIOS()
        self.disk = self.c.Win32_DiskDrive()
        self.network_adapter = self.c.Win32_NetworkAdapter()
        self.network_config = self.c.Win32_NetworkAdapterConfiguration()
        self.desktop_monitor = self.c.Win32_DesktopMonitor()
        self.cpu = self.c.Win32_Processor()
        self.usb = self.c.Win32_USBController()

    def get_all(self, obj):
        """Return, per WMI instance in *obj*, a list of single-key
        {property: value} dicts for every property that is not None."""
        collected = []
        for instance in obj:
            props = []
            for prop_name in list(instance.properties):
                value = getattr(instance, prop_name)
                if value is not None:
                    props.append({prop_name: value})
            collected.append(props)

        return collected
|
|
||||||
|
|
||||||
|
|
||||||
def system_info():
    """Gather the full WMI hardware/OS inventory as a dict of lists."""
    detail = SystemDetail()
    sources = {
        "comp_sys_prod": detail.comp_sys_prod,
        "comp_sys": detail.comp_sys,
        "mem": detail.memory,
        "os": detail.os,
        "base_board": detail.base_board,
        "bios": detail.bios,
        "disk": detail.disk,
        "network_adapter": detail.network_adapter,
        "network_config": detail.network_config,
        "desktop_monitor": detail.desktop_monitor,
        "cpu": detail.cpu,
        "usb": detail.usb,
    }
    return {key: detail.get_all(value) for key, value in sources.items()}
|
|
||||||
|
|
||||||
|
|
||||||
def local_sys_info():
    """Ask the agent binary to collect system info in the background."""
    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "sysinfo"])
|
|
||||||
|
|
||||||
|
|
||||||
def get_procs():
    """Snapshot running processes with cpu and memory usage.

    Returns:
        A list of dicts, one per process, with name, cpu_percent
        (normalized across all logical cpus), memory_percent, pid, ppid,
        status, username and a 1-based enumeration id. The idle process
        (pid 0) and unnamed processes are skipped.
    """
    ret = []

    # priming pass: the first cpu_percent(None) call always reports 0.0,
    # so touch every process once, wait, then read the real values
    for proc in psutil.process_iter():
        with proc.oneshot():
            proc.cpu_percent(interval=None)

    # need time for psutil to record cpu percent
    sleep(1)

    # hoisted out of the loop: cpu count is invariant across iterations
    logical_cpus = psutil.cpu_count()

    for c, proc in enumerate(psutil.process_iter(), 1):
        x = {}
        with proc.oneshot():
            # skip the System Idle Process and anything unnamed
            if proc.pid == 0 or not proc.name():
                continue

            x["name"] = proc.name()
            # normalize so 100% means all cores busy, not a single core
            x["cpu_percent"] = proc.cpu_percent(interval=None) / logical_cpus
            x["memory_percent"] = proc.memory_percent()
            x["pid"] = proc.pid
            x["ppid"] = proc.ppid()
            x["status"] = proc.status()
            x["username"] = proc.username()
            x["id"] = c

        ret.append(x)

    return ret
|
|
||||||
|
|
||||||
|
|
||||||
def _compress_json(j):
|
|
||||||
return {
|
|
||||||
"wineventlog": base64.b64encode(
|
|
||||||
zlib.compress(json.dumps(j).encode("utf-8", errors="ignore"))
|
|
||||||
).decode("ascii", errors="ignore")
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def get_eventlog(logtype, last_n_days):
    """Read windows event log *logtype* back to *last_n_days* ago.

    Reads newest-first until either every record has been seen or an
    event older than the cutoff is reached. Any read/parse error aborts
    collection silently and returns whatever was gathered so far.

    Returns:
        The collected events compressed via _compress_json.
    """

    start_time = datetime.datetime.now() - datetime.timedelta(days=last_n_days)
    # newest events first, in record order
    flags = win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ

    # map raw event-type constants to display strings; 0 treated as INFO
    status_dict = {
        win32con.EVENTLOG_AUDIT_FAILURE: "AUDIT_FAILURE",
        win32con.EVENTLOG_AUDIT_SUCCESS: "AUDIT_SUCCESS",
        win32con.EVENTLOG_INFORMATION_TYPE: "INFO",
        win32con.EVENTLOG_WARNING_TYPE: "WARNING",
        win32con.EVENTLOG_ERROR_TYPE: "ERROR",
        0: "INFO",
    }

    computer = "localhost"
    hand = win32evtlog.OpenEventLog(computer, logtype)
    total = win32evtlog.GetNumberOfEventLogRecords(hand)
    log = []
    uid = 0
    done = False

    try:
        while 1:
            events = win32evtlog.ReadEventLog(hand, flags, 0)
            for ev_obj in events:

                uid += 1
                # return once total number of events reach or we'll be stuck in an infinite loop
                if uid >= total:
                    done = True
                    break

                the_time = ev_obj.TimeGenerated.Format()
                time_obj = datetime.datetime.strptime(the_time, "%c")
                # reading backwards, so the first too-old event means we're done
                if time_obj < start_time:
                    done = True
                    break

                computer = str(ev_obj.ComputerName)
                src = str(ev_obj.SourceName)
                evt_type = str(status_dict[ev_obj.EventType])
                # EventID's low word is the displayed event id
                evt_id = str(winerror.HRESULT_CODE(ev_obj.EventID))
                evt_category = str(ev_obj.EventCategory)
                record = str(ev_obj.RecordNumber)
                # strip angle brackets so the message is safe to embed in markup
                msg = (
                    str(win32evtlogutil.SafeFormatMessage(ev_obj, logtype))
                    .replace("<", "")
                    .replace(">", "")
                )

                event_dict = {
                    "computer": computer,
                    "source": src,
                    "eventType": evt_type,
                    "eventID": evt_id,
                    "eventCategory": evt_category,
                    "message": msg,
                    "time": the_time,
                    "record": record,
                    "uid": uid,
                }

                log.append(event_dict)

            if done:
                break

    except Exception:
        # best effort: return whatever was collected before the failure
        pass

    win32evtlog.CloseEventLog(hand)
    return _compress_json(log)
|
|
||||||
@@ -1,5 +1,4 @@
|
|||||||
from django.contrib import admin
|
from django.contrib import admin
|
||||||
|
|
||||||
from rest_framework.authtoken.admin import TokenAdmin
|
from rest_framework.authtoken.admin import TokenAdmin
|
||||||
|
|
||||||
from .models import User
|
from .models import User
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
from django.utils import timezone as djangotime
|
|
||||||
|
|
||||||
from django.core.management.base import BaseCommand
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.utils import timezone as djangotime
|
||||||
from knox.models import AuthToken
|
from knox.models import AuthToken
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,11 +1,13 @@
|
|||||||
import pyotp
|
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
|
import pyotp
|
||||||
from django.core.management.base import BaseCommand
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
from accounts.models import User
|
from accounts.models import User
|
||||||
|
|
||||||
|
|
||||||
class Command(BaseCommand):
|
class Command(BaseCommand):
|
||||||
help = "Generates barcode for Google Authenticator and creates totp for user"
|
help = "Generates barcode for Authenticator and creates totp for user"
|
||||||
|
|
||||||
def add_arguments(self, parser):
|
def add_arguments(self, parser):
|
||||||
parser.add_argument("code", type=str)
|
parser.add_argument("code", type=str)
|
||||||
@@ -24,12 +26,10 @@ class Command(BaseCommand):
|
|||||||
url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain)
|
url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain)
|
||||||
subprocess.run(f'qr "{url}"', shell=True)
|
subprocess.run(f'qr "{url}"', shell=True)
|
||||||
self.stdout.write(
|
self.stdout.write(
|
||||||
self.style.SUCCESS(
|
self.style.SUCCESS("Scan the barcode above with your authenticator app")
|
||||||
"Scan the barcode above with your google authenticator app"
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
self.stdout.write(
|
self.stdout.write(
|
||||||
self.style.SUCCESS(
|
self.style.SUCCESS(
|
||||||
f"If that doesn't work you may manually enter the key: {code}"
|
f"If that doesn't work you may manually enter the setup key: {code}"
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|||||||
57
api/tacticalrmm/accounts/management/commands/reset_2fa.py
Normal file
57
api/tacticalrmm/accounts/management/commands/reset_2fa.py
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
import pyotp
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
|
from accounts.models import User
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    """Management command: regenerate a user's TOTP key and show the QR code."""

    help = "Reset 2fa"

    def add_arguments(self, parser):
        parser.add_argument("username", type=str)

    def handle(self, *args, **kwargs):
        """Generate a new TOTP secret for the user and print setup instructions."""
        username = kwargs["username"]
        try:
            user = User.objects.get(username=username)
        except User.DoesNotExist:
            self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
            return

        # use the site's domain as the issuer so the authenticator entry is
        # recognizable; fall back to a generic name if nginx config is absent
        domain = "Tactical RMM"
        nginx = "/etc/nginx/sites-available/frontend.conf"
        found = None
        if os.path.exists(nginx):
            # best effort — any parse failure falls back to the default name
            # (fixed: was a bare except that would also swallow SystemExit)
            try:
                with open(nginx, "r") as f:
                    for line in f:
                        if "server_name" in line:
                            found = line
                            break

                if found:
                    rep = found.replace("server_name", "").replace(";", "")
                    domain = "".join(rep.split())
            except Exception:
                pass

        code = pyotp.random_base32()
        user.totp_key = code
        user.save(update_fields=["totp_key"])

        url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain)
        subprocess.run(f'qr "{url}"', shell=True)
        self.stdout.write(
            self.style.WARNING("Scan the barcode above with your authenticator app")
        )
        self.stdout.write(
            self.style.WARNING(
                f"If that doesn't work you may manually enter the setup key: {code}"
            )
        )
        self.stdout.write(
            self.style.SUCCESS(f"2fa was successfully reset for user {username}")
        )
|
||||||
@@ -0,0 +1,22 @@
|
|||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from accounts.models import User
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = "Reset password for user"
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument("username", type=str)
|
||||||
|
|
||||||
|
def handle(self, *args, **kwargs):
|
||||||
|
username = kwargs["username"]
|
||||||
|
try:
|
||||||
|
user = User.objects.get(username=username)
|
||||||
|
except User.DoesNotExist:
|
||||||
|
self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
|
||||||
|
return
|
||||||
|
|
||||||
|
passwd = input("Enter new password: ")
|
||||||
|
user.set_password(passwd)
|
||||||
|
user.save()
|
||||||
|
self.stdout.write(self.style.SUCCESS(f"Password for {username} was reset!"))
|
||||||
@@ -2,8 +2,8 @@
|
|||||||
|
|
||||||
import django.contrib.auth.models
|
import django.contrib.auth.models
|
||||||
import django.contrib.auth.validators
|
import django.contrib.auth.validators
|
||||||
from django.db import migrations, models
|
|
||||||
import django.utils.timezone
|
import django.utils.timezone
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# Generated by Django 3.1.2 on 2020-11-10 20:24
|
# Generated by Django 3.1.2 on 2020-11-10 20:24
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|||||||
@@ -0,0 +1,26 @@
|
|||||||
|
# Generated by Django 3.1.5 on 2021-01-18 09:40
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("accounts", "0010_user_agent_dblclick_action"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="user",
|
||||||
|
name="default_agent_tbl_tab",
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("server", "Servers"),
|
||||||
|
("workstation", "Workstations"),
|
||||||
|
("mixed", "Mixed"),
|
||||||
|
],
|
||||||
|
default="server",
|
||||||
|
max_length=50,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.7 on 2021-02-28 06:38
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('accounts', '0011_user_default_agent_tbl_tab'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='user',
|
||||||
|
name='agents_per_page',
|
||||||
|
field=models.PositiveIntegerField(default=50),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
from django.db import models
|
|
||||||
from django.contrib.auth.models import AbstractUser
|
from django.contrib.auth.models import AbstractUser
|
||||||
|
from django.db import models
|
||||||
|
|
||||||
from logs.models import BaseAuditModel
|
from logs.models import BaseAuditModel
|
||||||
|
|
||||||
@@ -9,6 +9,12 @@ AGENT_DBLCLICK_CHOICES = [
|
|||||||
("remotebg", "Remote Background"),
|
("remotebg", "Remote Background"),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
AGENT_TBL_TAB_CHOICES = [
|
||||||
|
("server", "Servers"),
|
||||||
|
("workstation", "Workstations"),
|
||||||
|
("mixed", "Mixed"),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class User(AbstractUser, BaseAuditModel):
|
class User(AbstractUser, BaseAuditModel):
|
||||||
is_active = models.BooleanField(default=True)
|
is_active = models.BooleanField(default=True)
|
||||||
@@ -18,6 +24,10 @@ class User(AbstractUser, BaseAuditModel):
|
|||||||
agent_dblclick_action = models.CharField(
|
agent_dblclick_action = models.CharField(
|
||||||
max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent"
|
max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent"
|
||||||
)
|
)
|
||||||
|
default_agent_tbl_tab = models.CharField(
|
||||||
|
max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server"
|
||||||
|
)
|
||||||
|
agents_per_page = models.PositiveIntegerField(default=50)
|
||||||
|
|
||||||
agent = models.OneToOneField(
|
agent = models.OneToOneField(
|
||||||
"agents.Agent",
|
"agents.Agent",
|
||||||
|
|||||||
@@ -1,9 +1,5 @@
|
|||||||
import pyotp
|
import pyotp
|
||||||
|
from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
||||||
from rest_framework.serializers import (
|
|
||||||
ModelSerializer,
|
|
||||||
SerializerMethodField,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .models import User
|
from .models import User
|
||||||
|
|
||||||
|
|||||||
@@ -1,8 +1,9 @@
|
|||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
|
|
||||||
from django.test import override_settings
|
from django.test import override_settings
|
||||||
|
|
||||||
from tacticalrmm.test import TacticalTestCase
|
|
||||||
from accounts.models import User
|
from accounts.models import User
|
||||||
|
from tacticalrmm.test import TacticalTestCase
|
||||||
|
|
||||||
|
|
||||||
class TestAccounts(TacticalTestCase):
|
class TestAccounts(TacticalTestCase):
|
||||||
@@ -278,15 +279,12 @@ class TestUserAction(TacticalTestCase):
|
|||||||
r = self.client.patch(url, data, format="json")
|
r = self.client.patch(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
data = {"agent_dblclick_action": "editagent"}
|
data = {
|
||||||
r = self.client.patch(url, data, format="json")
|
"userui": True,
|
||||||
self.assertEqual(r.status_code, 200)
|
"agent_dblclick_action": "editagent",
|
||||||
|
"default_agent_tbl_tab": "mixed",
|
||||||
data = {"agent_dblclick_action": "remotebg"}
|
"agents_per_page": 1000,
|
||||||
r = self.client.patch(url, data, format="json")
|
}
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
|
|
||||||
data = {"agent_dblclick_action": "takecontrol"}
|
|
||||||
r = self.client.patch(url, data, format="json")
|
r = self.client.patch(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from . import views
|
from . import views
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
|
|||||||
@@ -1,23 +1,20 @@
|
|||||||
import pyotp
|
import pyotp
|
||||||
|
|
||||||
from django.contrib.auth import login
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.shortcuts import get_object_or_404
|
from django.contrib.auth import login
|
||||||
from django.db import IntegrityError
|
from django.db import IntegrityError
|
||||||
|
from django.shortcuts import get_object_or_404
|
||||||
from rest_framework.views import APIView
|
|
||||||
from rest_framework.authtoken.serializers import AuthTokenSerializer
|
|
||||||
from knox.views import LoginView as KnoxLoginView
|
from knox.views import LoginView as KnoxLoginView
|
||||||
|
from rest_framework import status
|
||||||
|
from rest_framework.authtoken.serializers import AuthTokenSerializer
|
||||||
from rest_framework.permissions import AllowAny
|
from rest_framework.permissions import AllowAny
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework import status
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
from .models import User
|
|
||||||
from agents.models import Agent
|
|
||||||
from logs.models import AuditLog
|
from logs.models import AuditLog
|
||||||
from tacticalrmm.utils import notify_error
|
from tacticalrmm.utils import notify_error
|
||||||
|
|
||||||
from .serializers import UserSerializer, TOTPSetupSerializer
|
from .models import User
|
||||||
|
from .serializers import TOTPSetupSerializer, UserSerializer
|
||||||
|
|
||||||
|
|
||||||
class CheckCreds(KnoxLoginView):
|
class CheckCreds(KnoxLoginView):
|
||||||
@@ -81,7 +78,7 @@ class GetAddUsers(APIView):
|
|||||||
def post(self, request):
|
def post(self, request):
|
||||||
# add new user
|
# add new user
|
||||||
try:
|
try:
|
||||||
user = User.objects.create_user(
|
user = User.objects.create_user( # type: ignore
|
||||||
request.data["username"],
|
request.data["username"],
|
||||||
request.data["email"],
|
request.data["email"],
|
||||||
request.data["password"],
|
request.data["password"],
|
||||||
@@ -189,16 +186,21 @@ class UserUI(APIView):
|
|||||||
def patch(self, request):
|
def patch(self, request):
|
||||||
user = request.user
|
user = request.user
|
||||||
|
|
||||||
if "dark_mode" in request.data:
|
if "dark_mode" in request.data.keys():
|
||||||
user.dark_mode = request.data["dark_mode"]
|
user.dark_mode = request.data["dark_mode"]
|
||||||
user.save(update_fields=["dark_mode"])
|
user.save(update_fields=["dark_mode"])
|
||||||
|
|
||||||
if "show_community_scripts" in request.data:
|
if "show_community_scripts" in request.data.keys():
|
||||||
user.show_community_scripts = request.data["show_community_scripts"]
|
user.show_community_scripts = request.data["show_community_scripts"]
|
||||||
user.save(update_fields=["show_community_scripts"])
|
user.save(update_fields=["show_community_scripts"])
|
||||||
|
|
||||||
if "agent_dblclick_action" in request.data:
|
if "userui" in request.data.keys():
|
||||||
user.agent_dblclick_action = request.data["agent_dblclick_action"]
|
user.agent_dblclick_action = request.data["agent_dblclick_action"]
|
||||||
user.save(update_fields=["agent_dblclick_action"])
|
user.default_agent_tbl_tab = request.data["default_agent_tbl_tab"]
|
||||||
|
user.save(update_fields=["agent_dblclick_action", "default_agent_tbl_tab"])
|
||||||
|
|
||||||
|
if "agents_per_page" in request.data.keys():
|
||||||
|
user.agents_per_page = request.data["agents_per_page"]
|
||||||
|
user.save(update_fields=["agents_per_page"])
|
||||||
|
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
from django.contrib import admin
|
from django.contrib import admin
|
||||||
|
|
||||||
from .models import Agent, AgentOutage, RecoveryAction, Note
|
from .models import Agent, Note, RecoveryAction
|
||||||
|
|
||||||
admin.site.register(Agent)
|
admin.site.register(Agent)
|
||||||
admin.site.register(AgentOutage)
|
|
||||||
admin.site.register(RecoveryAction)
|
admin.site.register(RecoveryAction)
|
||||||
admin.site.register(Note)
|
admin.site.register(Note)
|
||||||
|
|||||||
@@ -1,14 +1,12 @@
|
|||||||
|
import json
|
||||||
|
import os
|
||||||
import random
|
import random
|
||||||
import string
|
import string
|
||||||
import os
|
|
||||||
import json
|
|
||||||
|
|
||||||
from model_bakery.recipe import Recipe, seq
|
|
||||||
from itertools import cycle
|
from itertools import cycle
|
||||||
from django.utils import timezone as djangotime
|
|
||||||
from django.conf import settings
|
|
||||||
|
|
||||||
from .models import Agent
|
from django.conf import settings
|
||||||
|
from django.utils import timezone as djangotime
|
||||||
|
from model_bakery.recipe import Recipe, foreign_key, seq
|
||||||
|
|
||||||
|
|
||||||
def generate_agent_id(hostname):
|
def generate_agent_id(hostname):
|
||||||
@@ -16,6 +14,9 @@ def generate_agent_id(hostname):
|
|||||||
return f"{rand}-{hostname}"
|
return f"{rand}-{hostname}"
|
||||||
|
|
||||||
|
|
||||||
|
site = Recipe("clients.Site")
|
||||||
|
|
||||||
|
|
||||||
def get_wmi_data():
|
def get_wmi_data():
|
||||||
with open(
|
with open(
|
||||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json")
|
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json")
|
||||||
@@ -24,12 +25,12 @@ def get_wmi_data():
|
|||||||
|
|
||||||
|
|
||||||
agent = Recipe(
|
agent = Recipe(
|
||||||
Agent,
|
"agents.Agent",
|
||||||
|
site=foreign_key(site),
|
||||||
hostname="DESKTOP-TEST123",
|
hostname="DESKTOP-TEST123",
|
||||||
version="1.1.1",
|
version="1.3.0",
|
||||||
monitoring_type=cycle(["workstation", "server"]),
|
monitoring_type=cycle(["workstation", "server"]),
|
||||||
salt_id=generate_agent_id("DESKTOP-TEST123"),
|
agent_id=seq("asdkj3h4234-1234hg3h4g34-234jjh34|DESKTOP-TEST123"),
|
||||||
agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",
|
|
||||||
)
|
)
|
||||||
|
|
||||||
server_agent = agent.extend(
|
server_agent = agent.extend(
|
||||||
@@ -42,8 +43,12 @@ workstation_agent = agent.extend(
|
|||||||
|
|
||||||
online_agent = agent.extend(last_seen=djangotime.now())
|
online_agent = agent.extend(last_seen=djangotime.now())
|
||||||
|
|
||||||
|
offline_agent = agent.extend(
|
||||||
|
last_seen=djangotime.now() - djangotime.timedelta(minutes=7)
|
||||||
|
)
|
||||||
|
|
||||||
overdue_agent = agent.extend(
|
overdue_agent = agent.extend(
|
||||||
last_seen=djangotime.now() - djangotime.timedelta(minutes=6)
|
last_seen=djangotime.now() - djangotime.timedelta(minutes=35)
|
||||||
)
|
)
|
||||||
|
|
||||||
agent_with_services = agent.extend(
|
agent_with_services = agent.extend(
|
||||||
|
|||||||
@@ -0,0 +1,93 @@
|
|||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
|
from agents.models import Agent
|
||||||
|
from clients.models import Client, Site
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = "Bulk update agent offline/overdue time"
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument("time", type=int, help="Time in minutes")
|
||||||
|
parser.add_argument(
|
||||||
|
"--client",
|
||||||
|
type=str,
|
||||||
|
help="Client Name",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--site",
|
||||||
|
type=str,
|
||||||
|
help="Site Name",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--offline",
|
||||||
|
action="store_true",
|
||||||
|
help="Offline",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--overdue",
|
||||||
|
action="store_true",
|
||||||
|
help="Overdue",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--all",
|
||||||
|
action="store_true",
|
||||||
|
help="All agents",
|
||||||
|
)
|
||||||
|
|
||||||
|
def handle(self, *args, **kwargs):
|
||||||
|
time = kwargs["time"]
|
||||||
|
client_name = kwargs["client"]
|
||||||
|
site_name = kwargs["site"]
|
||||||
|
all_agents = kwargs["all"]
|
||||||
|
offline = kwargs["offline"]
|
||||||
|
overdue = kwargs["overdue"]
|
||||||
|
agents = None
|
||||||
|
|
||||||
|
if offline and time < 2:
|
||||||
|
self.stdout.write(self.style.ERROR("Minimum offline time is 2 minutes"))
|
||||||
|
return
|
||||||
|
|
||||||
|
if overdue and time < 3:
|
||||||
|
self.stdout.write(self.style.ERROR("Minimum overdue time is 3 minutes"))
|
||||||
|
return
|
||||||
|
|
||||||
|
if client_name:
|
||||||
|
try:
|
||||||
|
client = Client.objects.get(name=client_name)
|
||||||
|
except Client.DoesNotExist:
|
||||||
|
self.stdout.write(
|
||||||
|
self.style.ERROR(f"Client {client_name} doesn't exist")
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
agents = Agent.objects.filter(site__client=client)
|
||||||
|
|
||||||
|
elif site_name:
|
||||||
|
try:
|
||||||
|
site = Site.objects.get(name=site_name)
|
||||||
|
except Site.DoesNotExist:
|
||||||
|
self.stdout.write(self.style.ERROR(f"Site {site_name} doesn't exist"))
|
||||||
|
return
|
||||||
|
|
||||||
|
agents = Agent.objects.filter(site=site)
|
||||||
|
|
||||||
|
elif all_agents:
|
||||||
|
agents = Agent.objects.all()
|
||||||
|
|
||||||
|
if agents:
|
||||||
|
if offline:
|
||||||
|
agents.update(offline_time=time)
|
||||||
|
self.stdout.write(
|
||||||
|
self.style.SUCCESS(
|
||||||
|
f"Changed offline time on {len(agents)} agents to {time} minutes"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if overdue:
|
||||||
|
agents.update(overdue_time=time)
|
||||||
|
self.stdout.write(
|
||||||
|
self.style.SUCCESS(
|
||||||
|
f"Changed overdue time on {len(agents)} agents to {time} minutes"
|
||||||
|
)
|
||||||
|
)
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
from django.conf import settings
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
|
from agents.models import Agent
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = "Shows online agents that are not on the latest version"
|
||||||
|
|
||||||
|
def handle(self, *args, **kwargs):
|
||||||
|
q = Agent.objects.exclude(version=settings.LATEST_AGENT_VER).only(
|
||||||
|
"pk", "version", "last_seen", "overdue_time", "offline_time"
|
||||||
|
)
|
||||||
|
agents = [i for i in q if i.status == "online"]
|
||||||
|
for agent in agents:
|
||||||
|
self.stdout.write(
|
||||||
|
self.style.SUCCESS(f"{agent.hostname} - v{agent.version}")
|
||||||
|
)
|
||||||
@@ -1,8 +1,8 @@
|
|||||||
# Generated by Django 3.0.6 on 2020-05-31 01:23
|
# Generated by Django 3.0.6 on 2020-05-31 01:23
|
||||||
|
|
||||||
import django.contrib.postgres.fields.jsonb
|
import django.contrib.postgres.fields.jsonb
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# Generated by Django 3.0.7 on 2020-06-09 16:07
|
# Generated by Django 3.0.7 on 2020-06-09 16:07
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# Generated by Django 3.0.8 on 2020-08-09 05:31
|
# Generated by Django 3.0.8 on 2020-08-09 05:31
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
# Generated by Django 3.1.1 on 2020-09-22 20:57
|
# Generated by Django 3.1.1 on 2020-09-22 20:57
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.db import migrations, models
|
from django.db import migrations, models
|
||||||
import django.db.models.deletion
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# Generated by Django 3.1.2 on 2020-11-01 22:53
|
# Generated by Django 3.1.2 on 2020-11-01 22:53
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-29 21:11
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('agents', '0026_auto_20201125_2334'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='agent',
|
||||||
|
name='overdue_dashboard_alert',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
]
|
||||||
23
api/tacticalrmm/agents/migrations/0028_auto_20210206_1534.py
Normal file
23
api/tacticalrmm/agents/migrations/0028_auto_20210206_1534.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-02-06 15:34
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('agents', '0027_agent_overdue_dashboard_alert'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='agentoutage',
|
||||||
|
name='outage_email_sent_time',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='agentoutage',
|
||||||
|
name='outage_sms_sent_time',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
16
api/tacticalrmm/agents/migrations/0029_delete_agentoutage.py
Normal file
16
api/tacticalrmm/agents/migrations/0029_delete_agentoutage.py
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-02-10 21:56
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('agents', '0028_auto_20210206_1534'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.DeleteModel(
|
||||||
|
name='AgentOutage',
|
||||||
|
),
|
||||||
|
]
|
||||||
18
api/tacticalrmm/agents/migrations/0030_agent_offline_time.py
Normal file
18
api/tacticalrmm/agents/migrations/0030_agent_offline_time.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.6 on 2021-02-16 08:50
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('agents', '0029_delete_agentoutage'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='agent',
|
||||||
|
name='offline_time',
|
||||||
|
field=models.PositiveIntegerField(default=4),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -1,25 +1,27 @@
|
|||||||
import requests
|
import asyncio
|
||||||
import time
|
|
||||||
import base64
|
import base64
|
||||||
from Crypto.Cipher import AES
|
|
||||||
from Crypto.Random import get_random_bytes
|
|
||||||
from Crypto.Hash import SHA3_384
|
|
||||||
from Crypto.Util.Padding import pad
|
|
||||||
import validators
|
|
||||||
import msgpack
|
|
||||||
import re
|
import re
|
||||||
|
import time
|
||||||
from collections import Counter
|
from collections import Counter
|
||||||
from loguru import logger
|
|
||||||
from packaging import version as pyver
|
|
||||||
from distutils.version import LooseVersion
|
from distutils.version import LooseVersion
|
||||||
|
from typing import Any, Union
|
||||||
|
|
||||||
|
import msgpack
|
||||||
|
import validators
|
||||||
|
from Crypto.Cipher import AES
|
||||||
|
from Crypto.Hash import SHA3_384
|
||||||
|
from Crypto.Random import get_random_bytes
|
||||||
|
from Crypto.Util.Padding import pad
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import models
|
||||||
|
from django.utils import timezone as djangotime
|
||||||
|
from loguru import logger
|
||||||
from nats.aio.client import Client as NATS
|
from nats.aio.client import Client as NATS
|
||||||
from nats.aio.errors import ErrTimeout
|
from nats.aio.errors import ErrTimeout
|
||||||
|
from packaging import version as pyver
|
||||||
|
|
||||||
from django.db import models
|
from alerts.models import AlertTemplate
|
||||||
from django.conf import settings
|
from core.models import TZ_CHOICES, CoreSettings
|
||||||
from django.utils import timezone as djangotime
|
|
||||||
|
|
||||||
from core.models import CoreSettings, TZ_CHOICES
|
|
||||||
from logs.models import BaseAuditModel
|
from logs.models import BaseAuditModel
|
||||||
|
|
||||||
logger.configure(**settings.LOG_CONFIG)
|
logger.configure(**settings.LOG_CONFIG)
|
||||||
@@ -50,6 +52,8 @@ class Agent(BaseAuditModel):
|
|||||||
mesh_node_id = models.CharField(null=True, blank=True, max_length=255)
|
mesh_node_id = models.CharField(null=True, blank=True, max_length=255)
|
||||||
overdue_email_alert = models.BooleanField(default=False)
|
overdue_email_alert = models.BooleanField(default=False)
|
||||||
overdue_text_alert = models.BooleanField(default=False)
|
overdue_text_alert = models.BooleanField(default=False)
|
||||||
|
overdue_dashboard_alert = models.BooleanField(default=False)
|
||||||
|
offline_time = models.PositiveIntegerField(default=4)
|
||||||
overdue_time = models.PositiveIntegerField(default=30)
|
overdue_time = models.PositiveIntegerField(default=30)
|
||||||
check_interval = models.PositiveIntegerField(default=120)
|
check_interval = models.PositiveIntegerField(default=120)
|
||||||
needs_reboot = models.BooleanField(default=False)
|
needs_reboot = models.BooleanField(default=False)
|
||||||
@@ -75,6 +79,24 @@ class Agent(BaseAuditModel):
|
|||||||
on_delete=models.SET_NULL,
|
on_delete=models.SET_NULL,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def save(self, *args, **kwargs):
|
||||||
|
|
||||||
|
# get old agent if exists
|
||||||
|
old_agent = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||||
|
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||||
|
|
||||||
|
# check if new agent has been create
|
||||||
|
# or check if policy have changed on agent
|
||||||
|
# or if site has changed on agent and if so generate-policies
|
||||||
|
if (
|
||||||
|
not old_agent
|
||||||
|
or old_agent
|
||||||
|
and old_agent.policy != self.policy
|
||||||
|
or old_agent.site != self.site
|
||||||
|
):
|
||||||
|
self.generate_checks_from_policies()
|
||||||
|
self.generate_tasks_from_policies()
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.hostname
|
return self.hostname
|
||||||
|
|
||||||
@@ -117,14 +139,6 @@ class Agent(BaseAuditModel):
|
|||||||
return settings.DL_32
|
return settings.DL_32
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@property
|
|
||||||
def winsalt_dl(self):
|
|
||||||
if self.arch == "64":
|
|
||||||
return settings.SALT_64
|
|
||||||
elif self.arch == "32":
|
|
||||||
return settings.SALT_32
|
|
||||||
return None
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def win_inno_exe(self):
|
def win_inno_exe(self):
|
||||||
if self.arch == "64":
|
if self.arch == "64":
|
||||||
@@ -135,7 +149,7 @@ class Agent(BaseAuditModel):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def status(self):
|
def status(self):
|
||||||
offline = djangotime.now() - djangotime.timedelta(minutes=6)
|
offline = djangotime.now() - djangotime.timedelta(minutes=self.offline_time)
|
||||||
overdue = djangotime.now() - djangotime.timedelta(minutes=self.overdue_time)
|
overdue = djangotime.now() - djangotime.timedelta(minutes=self.overdue_time)
|
||||||
|
|
||||||
if self.last_seen is not None:
|
if self.last_seen is not None:
|
||||||
@@ -150,14 +164,14 @@ class Agent(BaseAuditModel):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def has_patches_pending(self):
|
def has_patches_pending(self):
|
||||||
return self.winupdates.filter(action="approve").filter(installed=False).exists()
|
return self.winupdates.filter(action="approve").filter(installed=False).exists() # type: ignore
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def checks(self):
|
def checks(self):
|
||||||
total, passing, failing = 0, 0, 0
|
total, passing, failing = 0, 0, 0
|
||||||
|
|
||||||
if self.agentchecks.exists():
|
if self.agentchecks.exists(): # type: ignore
|
||||||
for i in self.agentchecks.all():
|
for i in self.agentchecks.all(): # type: ignore
|
||||||
total += 1
|
total += 1
|
||||||
if i.status == "passing":
|
if i.status == "passing":
|
||||||
passing += 1
|
passing += 1
|
||||||
@@ -227,6 +241,7 @@ class Agent(BaseAuditModel):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
comp_sys_prod = self.wmi_detail["comp_sys_prod"][0]
|
||||||
return [x["Version"] for x in comp_sys_prod if "Version" in x][0]
|
return [x["Version"] for x in comp_sys_prod if "Version" in x][0]
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
@@ -256,33 +271,90 @@ class Agent(BaseAuditModel):
|
|||||||
except:
|
except:
|
||||||
return ["unknown disk"]
|
return ["unknown disk"]
|
||||||
|
|
||||||
|
def run_script(
|
||||||
|
self,
|
||||||
|
scriptpk: int,
|
||||||
|
args: list[str] = [],
|
||||||
|
timeout: int = 120,
|
||||||
|
full: bool = False,
|
||||||
|
wait: bool = False,
|
||||||
|
run_on_any: bool = False,
|
||||||
|
) -> Any:
|
||||||
|
|
||||||
|
from scripts.models import Script
|
||||||
|
|
||||||
|
script = Script.objects.get(pk=scriptpk)
|
||||||
|
data = {
|
||||||
|
"func": "runscriptfull" if full else "runscript",
|
||||||
|
"timeout": timeout,
|
||||||
|
"script_args": args,
|
||||||
|
"payload": {
|
||||||
|
"code": script.code,
|
||||||
|
"shell": script.shell,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
running_agent = self
|
||||||
|
if run_on_any:
|
||||||
|
nats_ping = {"func": "ping"}
|
||||||
|
|
||||||
|
# try on self first
|
||||||
|
r = asyncio.run(self.nats_cmd(nats_ping, timeout=1))
|
||||||
|
|
||||||
|
if r == "pong":
|
||||||
|
running_agent = self
|
||||||
|
else:
|
||||||
|
online = [
|
||||||
|
agent
|
||||||
|
for agent in Agent.objects.only(
|
||||||
|
"pk", "last_seen", "overdue_time", "offline_time"
|
||||||
|
)
|
||||||
|
if agent.status == "online"
|
||||||
|
]
|
||||||
|
|
||||||
|
for agent in online:
|
||||||
|
r = asyncio.run(agent.nats_cmd(nats_ping, timeout=1))
|
||||||
|
if r == "pong":
|
||||||
|
running_agent = agent
|
||||||
|
break
|
||||||
|
|
||||||
|
if running_agent.pk == self.pk:
|
||||||
|
return "Unable to find an online agent"
|
||||||
|
|
||||||
|
if wait:
|
||||||
|
return asyncio.run(running_agent.nats_cmd(data, timeout=timeout, wait=True))
|
||||||
|
else:
|
||||||
|
asyncio.run(running_agent.nats_cmd(data, wait=False))
|
||||||
|
|
||||||
|
return "ok"
|
||||||
|
|
||||||
# auto approves updates
|
# auto approves updates
|
||||||
def approve_updates(self):
|
def approve_updates(self):
|
||||||
patch_policy = self.get_patch_policy()
|
patch_policy = self.get_patch_policy()
|
||||||
|
|
||||||
updates = list()
|
updates = list()
|
||||||
if patch_policy.critical == "approve":
|
if patch_policy.critical == "approve":
|
||||||
updates += self.winupdates.filter(
|
updates += self.winupdates.filter( # type: ignore
|
||||||
severity="Critical", installed=False
|
severity="Critical", installed=False
|
||||||
).exclude(action="approve")
|
).exclude(action="approve")
|
||||||
|
|
||||||
if patch_policy.important == "approve":
|
if patch_policy.important == "approve":
|
||||||
updates += self.winupdates.filter(
|
updates += self.winupdates.filter( # type: ignore
|
||||||
severity="Important", installed=False
|
severity="Important", installed=False
|
||||||
).exclude(action="approve")
|
).exclude(action="approve")
|
||||||
|
|
||||||
if patch_policy.moderate == "approve":
|
if patch_policy.moderate == "approve":
|
||||||
updates += self.winupdates.filter(
|
updates += self.winupdates.filter( # type: ignore
|
||||||
severity="Moderate", installed=False
|
severity="Moderate", installed=False
|
||||||
).exclude(action="approve")
|
).exclude(action="approve")
|
||||||
|
|
||||||
if patch_policy.low == "approve":
|
if patch_policy.low == "approve":
|
||||||
updates += self.winupdates.filter(severity="Low", installed=False).exclude(
|
updates += self.winupdates.filter(severity="Low", installed=False).exclude( # type: ignore
|
||||||
action="approve"
|
action="approve"
|
||||||
)
|
)
|
||||||
|
|
||||||
if patch_policy.other == "approve":
|
if patch_policy.other == "approve":
|
||||||
updates += self.winupdates.filter(severity="", installed=False).exclude(
|
updates += self.winupdates.filter(severity="", installed=False).exclude( # type: ignore
|
||||||
action="approve"
|
action="approve"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -297,7 +369,7 @@ class Agent(BaseAuditModel):
|
|||||||
site = self.site
|
site = self.site
|
||||||
core_settings = CoreSettings.objects.first()
|
core_settings = CoreSettings.objects.first()
|
||||||
patch_policy = None
|
patch_policy = None
|
||||||
agent_policy = self.winupdatepolicy.get()
|
agent_policy = self.winupdatepolicy.get() # type: ignore
|
||||||
|
|
||||||
if self.monitoring_type == "server":
|
if self.monitoring_type == "server":
|
||||||
# check agent policy first which should override client or site policy
|
# check agent policy first which should override client or site policy
|
||||||
@@ -382,32 +454,132 @@ class Agent(BaseAuditModel):
|
|||||||
|
|
||||||
return patch_policy
|
return patch_policy
|
||||||
|
|
||||||
# clear is used to delete managed policy checks from agent
|
def get_approved_update_guids(self) -> list[str]:
|
||||||
# parent_checks specifies a list of checks to delete from agent with matching parent_check field
|
return list(
|
||||||
def generate_checks_from_policies(self, clear=False):
|
self.winupdates.filter(action="approve", installed=False).values_list( # type: ignore
|
||||||
|
"guid", flat=True
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# returns alert template assigned in the following order: policy, site, client, global
|
||||||
|
# will return None if nothing is found
|
||||||
|
def get_alert_template(self) -> Union[AlertTemplate, None]:
|
||||||
|
|
||||||
|
site = self.site
|
||||||
|
client = self.client
|
||||||
|
core = CoreSettings.objects.first()
|
||||||
|
|
||||||
|
templates = list()
|
||||||
|
# check if alert template is on a policy assigned to agent
|
||||||
|
if (
|
||||||
|
self.policy
|
||||||
|
and self.policy.alert_template
|
||||||
|
and self.policy.alert_template.is_active
|
||||||
|
):
|
||||||
|
templates.append(self.policy.alert_template)
|
||||||
|
|
||||||
|
# check if policy with alert template is assigned to the site
|
||||||
|
if (
|
||||||
|
self.monitoring_type == "server"
|
||||||
|
and site.server_policy
|
||||||
|
and site.server_policy.alert_template
|
||||||
|
and site.server_policy.alert_template.is_active
|
||||||
|
):
|
||||||
|
templates.append(site.server_policy.alert_template)
|
||||||
|
if (
|
||||||
|
self.monitoring_type == "workstation"
|
||||||
|
and site.workstation_policy
|
||||||
|
and site.workstation_policy.alert_template
|
||||||
|
and site.workstation_policy.alert_template.is_active
|
||||||
|
):
|
||||||
|
templates.append(site.workstation_policy.alert_template)
|
||||||
|
|
||||||
|
# check if alert template is assigned to site
|
||||||
|
if site.alert_template and site.alert_template.is_active:
|
||||||
|
templates.append(site.alert_template)
|
||||||
|
|
||||||
|
# check if policy with alert template is assigned to the client
|
||||||
|
if (
|
||||||
|
self.monitoring_type == "server"
|
||||||
|
and client.server_policy
|
||||||
|
and client.server_policy.alert_template
|
||||||
|
and client.server_policy.alert_template.is_active
|
||||||
|
):
|
||||||
|
templates.append(client.server_policy.alert_template)
|
||||||
|
if (
|
||||||
|
self.monitoring_type == "workstation"
|
||||||
|
and client.workstation_policy
|
||||||
|
and client.workstation_policy.alert_template
|
||||||
|
and client.workstation_policy.alert_template.is_active
|
||||||
|
):
|
||||||
|
templates.append(client.workstation_policy.alert_template)
|
||||||
|
|
||||||
|
# check if alert template is on client and return
|
||||||
|
if client.alert_template and client.alert_template.is_active:
|
||||||
|
templates.append(client.alert_template)
|
||||||
|
|
||||||
|
# check if alert template is applied globally and return
|
||||||
|
if core.alert_template and core.alert_template.is_active:
|
||||||
|
templates.append(core.alert_template)
|
||||||
|
|
||||||
|
# if agent is a workstation, check if policy with alert template is assigned to the site, client, or core
|
||||||
|
if (
|
||||||
|
self.monitoring_type == "server"
|
||||||
|
and core.server_policy
|
||||||
|
and core.server_policy.alert_template
|
||||||
|
and core.server_policy.alert_template.is_active
|
||||||
|
):
|
||||||
|
templates.append(core.server_policy.alert_template)
|
||||||
|
if (
|
||||||
|
self.monitoring_type == "workstation"
|
||||||
|
and core.workstation_policy
|
||||||
|
and core.workstation_policy.alert_template
|
||||||
|
and core.workstation_policy.alert_template.is_active
|
||||||
|
):
|
||||||
|
templates.append(core.workstation_policy.alert_template)
|
||||||
|
|
||||||
|
# go through the templates and return the first one that isn't excluded
|
||||||
|
for template in templates:
|
||||||
|
# check if client, site, or agent has been excluded from template
|
||||||
|
if (
|
||||||
|
client.pk
|
||||||
|
in template.excluded_clients.all().values_list("pk", flat=True)
|
||||||
|
or site.pk in template.excluded_sites.all().values_list("pk", flat=True)
|
||||||
|
or self.pk
|
||||||
|
in template.excluded_agents.all()
|
||||||
|
.only("pk")
|
||||||
|
.values_list("pk", flat=True)
|
||||||
|
):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# check if template is excluding desktops
|
||||||
|
elif (
|
||||||
|
self.monitoring_type == "workstation" and template.exclude_workstations
|
||||||
|
):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# check if template is excluding servers
|
||||||
|
elif self.monitoring_type == "server" and template.exclude_servers:
|
||||||
|
continue
|
||||||
|
|
||||||
|
else:
|
||||||
|
return template
|
||||||
|
|
||||||
|
# no alert templates found or agent has been excluded
|
||||||
|
return None
|
||||||
|
|
||||||
|
def generate_checks_from_policies(self):
|
||||||
from automation.models import Policy
|
from automation.models import Policy
|
||||||
|
|
||||||
# Clear agent checks managed by policy
|
|
||||||
if clear:
|
|
||||||
self.agentchecks.filter(managed_by_policy=True).delete()
|
|
||||||
|
|
||||||
# Clear agent checks that have overriden_by_policy set
|
# Clear agent checks that have overriden_by_policy set
|
||||||
self.agentchecks.update(overriden_by_policy=False)
|
self.agentchecks.update(overriden_by_policy=False) # type: ignore
|
||||||
|
|
||||||
# Generate checks based on policies
|
# Generate checks based on policies
|
||||||
Policy.generate_policy_checks(self)
|
Policy.generate_policy_checks(self)
|
||||||
|
|
||||||
# clear is used to delete managed policy tasks from agent
|
def generate_tasks_from_policies(self):
|
||||||
# parent_tasks specifies a list of tasks to delete from agent with matching parent_task field
|
|
||||||
def generate_tasks_from_policies(self, clear=False):
|
|
||||||
from autotasks.tasks import delete_win_task_schedule
|
|
||||||
from automation.models import Policy
|
from automation.models import Policy
|
||||||
|
|
||||||
# Clear agent tasks managed by policy
|
|
||||||
if clear:
|
|
||||||
for task in self.autotasks.filter(managed_by_policy=True):
|
|
||||||
delete_win_task_schedule.delay(task.pk)
|
|
||||||
|
|
||||||
# Generate tasks based on policies
|
# Generate tasks based on policies
|
||||||
Policy.generate_policy_tasks(self)
|
Policy.generate_policy_tasks(self)
|
||||||
|
|
||||||
@@ -435,7 +607,7 @@ class Agent(BaseAuditModel):
|
|||||||
except Exception:
|
except Exception:
|
||||||
return "err"
|
return "err"
|
||||||
|
|
||||||
async def nats_cmd(self, data, timeout=30, wait=True):
|
async def nats_cmd(self, data: dict, timeout: int = 30, wait: bool = True):
|
||||||
nc = NATS()
|
nc = NATS()
|
||||||
options = {
|
options = {
|
||||||
"servers": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
"servers": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||||
@@ -457,7 +629,7 @@ class Agent(BaseAuditModel):
|
|||||||
except ErrTimeout:
|
except ErrTimeout:
|
||||||
ret = "timeout"
|
ret = "timeout"
|
||||||
else:
|
else:
|
||||||
ret = msgpack.loads(msg.data)
|
ret = msgpack.loads(msg.data) # type: ignore
|
||||||
|
|
||||||
await nc.close()
|
await nc.close()
|
||||||
return ret
|
return ret
|
||||||
@@ -466,77 +638,6 @@ class Agent(BaseAuditModel):
|
|||||||
await nc.flush()
|
await nc.flush()
|
||||||
await nc.close()
|
await nc.close()
|
||||||
|
|
||||||
def salt_api_cmd(self, **kwargs):
|
|
||||||
|
|
||||||
# salt should always timeout first before the requests' timeout
|
|
||||||
try:
|
|
||||||
timeout = kwargs["timeout"]
|
|
||||||
except KeyError:
|
|
||||||
# default timeout
|
|
||||||
timeout = 15
|
|
||||||
salt_timeout = 12
|
|
||||||
else:
|
|
||||||
if timeout < 8:
|
|
||||||
timeout = 8
|
|
||||||
salt_timeout = 5
|
|
||||||
else:
|
|
||||||
salt_timeout = timeout - 3
|
|
||||||
|
|
||||||
json = {
|
|
||||||
"client": "local",
|
|
||||||
"tgt": self.salt_id,
|
|
||||||
"fun": kwargs["func"],
|
|
||||||
"timeout": salt_timeout,
|
|
||||||
"username": settings.SALT_USERNAME,
|
|
||||||
"password": settings.SALT_PASSWORD,
|
|
||||||
"eauth": "pam",
|
|
||||||
}
|
|
||||||
|
|
||||||
if "arg" in kwargs:
|
|
||||||
json.update({"arg": kwargs["arg"]})
|
|
||||||
if "kwargs" in kwargs:
|
|
||||||
json.update({"kwarg": kwargs["kwargs"]})
|
|
||||||
|
|
||||||
try:
|
|
||||||
resp = requests.post(
|
|
||||||
f"http://{settings.SALT_HOST}:8123/run",
|
|
||||||
json=[json],
|
|
||||||
timeout=timeout,
|
|
||||||
)
|
|
||||||
except Exception:
|
|
||||||
return "timeout"
|
|
||||||
|
|
||||||
try:
|
|
||||||
ret = resp.json()["return"][0][self.salt_id]
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"{self.salt_id}: {e}")
|
|
||||||
return "error"
|
|
||||||
else:
|
|
||||||
return ret
|
|
||||||
|
|
||||||
def salt_api_async(self, **kwargs):
|
|
||||||
|
|
||||||
json = {
|
|
||||||
"client": "local_async",
|
|
||||||
"tgt": self.salt_id,
|
|
||||||
"fun": kwargs["func"],
|
|
||||||
"username": settings.SALT_USERNAME,
|
|
||||||
"password": settings.SALT_PASSWORD,
|
|
||||||
"eauth": "pam",
|
|
||||||
}
|
|
||||||
|
|
||||||
if "arg" in kwargs:
|
|
||||||
json.update({"arg": kwargs["arg"]})
|
|
||||||
if "kwargs" in kwargs:
|
|
||||||
json.update({"kwarg": kwargs["kwargs"]})
|
|
||||||
|
|
||||||
try:
|
|
||||||
resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
|
|
||||||
except Exception:
|
|
||||||
return "timeout"
|
|
||||||
|
|
||||||
return resp
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def serialize(agent):
|
def serialize(agent):
|
||||||
# serializes the agent and returns json
|
# serializes the agent and returns json
|
||||||
@@ -547,41 +648,15 @@ class Agent(BaseAuditModel):
|
|||||||
del ret["client"]
|
del ret["client"]
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def salt_batch_async(**kwargs):
|
|
||||||
assert isinstance(kwargs["minions"], list)
|
|
||||||
|
|
||||||
json = {
|
|
||||||
"client": "local_async",
|
|
||||||
"tgt_type": "list",
|
|
||||||
"tgt": kwargs["minions"],
|
|
||||||
"fun": kwargs["func"],
|
|
||||||
"username": settings.SALT_USERNAME,
|
|
||||||
"password": settings.SALT_PASSWORD,
|
|
||||||
"eauth": "pam",
|
|
||||||
}
|
|
||||||
|
|
||||||
if "arg" in kwargs:
|
|
||||||
json.update({"arg": kwargs["arg"]})
|
|
||||||
if "kwargs" in kwargs:
|
|
||||||
json.update({"kwarg": kwargs["kwargs"]})
|
|
||||||
|
|
||||||
try:
|
|
||||||
resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
|
|
||||||
except Exception:
|
|
||||||
return "timeout"
|
|
||||||
|
|
||||||
return resp
|
|
||||||
|
|
||||||
def delete_superseded_updates(self):
|
def delete_superseded_updates(self):
|
||||||
try:
|
try:
|
||||||
pks = [] # list of pks to delete
|
pks = [] # list of pks to delete
|
||||||
kbs = list(self.winupdates.values_list("kb", flat=True))
|
kbs = list(self.winupdates.values_list("kb", flat=True)) # type: ignore
|
||||||
d = Counter(kbs)
|
d = Counter(kbs)
|
||||||
dupes = [k for k, v in d.items() if v > 1]
|
dupes = [k for k, v in d.items() if v > 1]
|
||||||
|
|
||||||
for dupe in dupes:
|
for dupe in dupes:
|
||||||
titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True)
|
titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True) # type: ignore
|
||||||
# extract the version from the title and sort from oldest to newest
|
# extract the version from the title and sort from oldest to newest
|
||||||
# skip if no version info is available therefore nothing to parse
|
# skip if no version info is available therefore nothing to parse
|
||||||
try:
|
try:
|
||||||
@@ -594,24 +669,24 @@ class Agent(BaseAuditModel):
|
|||||||
continue
|
continue
|
||||||
# append all but the latest version to our list of pks to delete
|
# append all but the latest version to our list of pks to delete
|
||||||
for ver in sorted_vers[:-1]:
|
for ver in sorted_vers[:-1]:
|
||||||
q = self.winupdates.filter(kb=dupe).filter(title__contains=ver)
|
q = self.winupdates.filter(kb=dupe).filter(title__contains=ver) # type: ignore
|
||||||
pks.append(q.first().pk)
|
pks.append(q.first().pk)
|
||||||
|
|
||||||
pks = list(set(pks))
|
pks = list(set(pks))
|
||||||
self.winupdates.filter(pk__in=pks).delete()
|
self.winupdates.filter(pk__in=pks).delete() # type: ignore
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# define how the agent should handle pending actions
|
# define how the agent should handle pending actions
|
||||||
def handle_pending_actions(self):
|
def handle_pending_actions(self):
|
||||||
pending_actions = self.pendingactions.filter(status="pending")
|
pending_actions = self.pendingactions.filter(status="pending") # type: ignore
|
||||||
|
|
||||||
for action in pending_actions:
|
for action in pending_actions:
|
||||||
if action.action_type == "taskaction":
|
if action.action_type == "taskaction":
|
||||||
from autotasks.tasks import (
|
from autotasks.tasks import (
|
||||||
create_win_task_schedule,
|
create_win_task_schedule,
|
||||||
enable_or_disable_win_task,
|
|
||||||
delete_win_task_schedule,
|
delete_win_task_schedule,
|
||||||
|
enable_or_disable_win_task,
|
||||||
)
|
)
|
||||||
|
|
||||||
task_id = action.details["task_id"]
|
task_id = action.details["task_id"]
|
||||||
@@ -625,73 +700,80 @@ class Agent(BaseAuditModel):
|
|||||||
elif action.details["action"] == "taskdelete":
|
elif action.details["action"] == "taskdelete":
|
||||||
delete_win_task_schedule.delay(task_id, pending_action=action.id)
|
delete_win_task_schedule.delay(task_id, pending_action=action.id)
|
||||||
|
|
||||||
|
# for clearing duplicate pending actions on agent
|
||||||
|
def remove_matching_pending_task_actions(self, task_id):
|
||||||
|
# remove any other pending actions on agent with same task_id
|
||||||
|
for action in self.pendingactions.exclude(status="completed"): # type: ignore
|
||||||
|
if action.details["task_id"] == task_id:
|
||||||
|
action.delete()
|
||||||
|
|
||||||
class AgentOutage(models.Model):
|
def should_create_alert(self, alert_template):
|
||||||
agent = models.ForeignKey(
|
return (
|
||||||
Agent,
|
self.overdue_dashboard_alert
|
||||||
related_name="agentoutages",
|
or self.overdue_email_alert
|
||||||
null=True,
|
or self.overdue_text_alert
|
||||||
blank=True,
|
or (
|
||||||
on_delete=models.CASCADE,
|
alert_template
|
||||||
)
|
and (
|
||||||
outage_time = models.DateTimeField(auto_now_add=True)
|
alert_template.agent_always_alert
|
||||||
recovery_time = models.DateTimeField(null=True, blank=True)
|
or alert_template.agent_always_email
|
||||||
outage_email_sent = models.BooleanField(default=False)
|
or alert_template.agent_always_text
|
||||||
outage_sms_sent = models.BooleanField(default=False)
|
)
|
||||||
recovery_email_sent = models.BooleanField(default=False)
|
)
|
||||||
recovery_sms_sent = models.BooleanField(default=False)
|
)
|
||||||
|
|
||||||
@property
|
|
||||||
def is_active(self):
|
|
||||||
return False if self.recovery_time else True
|
|
||||||
|
|
||||||
def send_outage_email(self):
|
def send_outage_email(self):
|
||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
|
|
||||||
CORE = CoreSettings.objects.first()
|
CORE = CoreSettings.objects.first()
|
||||||
|
alert_template = self.get_alert_template()
|
||||||
CORE.send_mail(
|
CORE.send_mail(
|
||||||
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue",
|
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
|
||||||
(
|
(
|
||||||
f"Data has not been received from client {self.agent.client.name}, "
|
f"Data has not been received from client {self.client.name}, "
|
||||||
f"site {self.agent.site.name}, "
|
f"site {self.site.name}, "
|
||||||
f"agent {self.agent.hostname} "
|
f"agent {self.hostname} "
|
||||||
"within the expected time."
|
"within the expected time."
|
||||||
),
|
),
|
||||||
|
alert_template=alert_template,
|
||||||
)
|
)
|
||||||
|
|
||||||
def send_recovery_email(self):
|
def send_recovery_email(self):
|
||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
|
|
||||||
CORE = CoreSettings.objects.first()
|
CORE = CoreSettings.objects.first()
|
||||||
|
alert_template = self.get_alert_template()
|
||||||
CORE.send_mail(
|
CORE.send_mail(
|
||||||
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received",
|
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
|
||||||
(
|
(
|
||||||
f"Data has been received from client {self.agent.client.name}, "
|
f"Data has been received from client {self.client.name}, "
|
||||||
f"site {self.agent.site.name}, "
|
f"site {self.site.name}, "
|
||||||
f"agent {self.agent.hostname} "
|
f"agent {self.hostname} "
|
||||||
"after an interruption in data transmission."
|
"after an interruption in data transmission."
|
||||||
),
|
),
|
||||||
|
alert_template=alert_template,
|
||||||
)
|
)
|
||||||
|
|
||||||
def send_outage_sms(self):
|
def send_outage_sms(self):
|
||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
|
|
||||||
|
alert_template = self.get_alert_template()
|
||||||
CORE = CoreSettings.objects.first()
|
CORE = CoreSettings.objects.first()
|
||||||
CORE.send_sms(
|
CORE.send_sms(
|
||||||
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue"
|
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
|
||||||
|
alert_template=alert_template,
|
||||||
)
|
)
|
||||||
|
|
||||||
def send_recovery_sms(self):
|
def send_recovery_sms(self):
|
||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
|
|
||||||
CORE = CoreSettings.objects.first()
|
CORE = CoreSettings.objects.first()
|
||||||
|
alert_template = self.get_alert_template()
|
||||||
CORE.send_sms(
|
CORE.send_sms(
|
||||||
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received"
|
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
|
||||||
|
alert_template=alert_template,
|
||||||
)
|
)
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return self.agent.hostname
|
|
||||||
|
|
||||||
|
|
||||||
RECOVERY_CHOICES = [
|
RECOVERY_CHOICES = [
|
||||||
("salt", "Salt"),
|
("salt", "Salt"),
|
||||||
|
|||||||
@@ -1,13 +1,11 @@
|
|||||||
import pytz
|
import pytz
|
||||||
|
|
||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
from rest_framework.fields import ReadOnlyField
|
|
||||||
|
from clients.serializers import ClientSerializer
|
||||||
|
from winupdate.serializers import WinUpdatePolicySerializer
|
||||||
|
|
||||||
from .models import Agent, Note
|
from .models import Agent, Note
|
||||||
|
|
||||||
from winupdate.serializers import WinUpdatePolicySerializer
|
|
||||||
from clients.serializers import ClientSerializer
|
|
||||||
|
|
||||||
|
|
||||||
class AgentSerializer(serializers.ModelSerializer):
|
class AgentSerializer(serializers.ModelSerializer):
|
||||||
# for vue
|
# for vue
|
||||||
@@ -34,6 +32,17 @@ class AgentSerializer(serializers.ModelSerializer):
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class AgentOverdueActionSerializer(serializers.ModelSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = Agent
|
||||||
|
fields = [
|
||||||
|
"pk",
|
||||||
|
"overdue_email_alert",
|
||||||
|
"overdue_text_alert",
|
||||||
|
"overdue_dashboard_alert",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class AgentTableSerializer(serializers.ModelSerializer):
|
class AgentTableSerializer(serializers.ModelSerializer):
|
||||||
patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
|
patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
|
||||||
pending_actions = serializers.SerializerMethodField()
|
pending_actions = serializers.SerializerMethodField()
|
||||||
@@ -42,22 +51,51 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
|||||||
last_seen = serializers.SerializerMethodField()
|
last_seen = serializers.SerializerMethodField()
|
||||||
client_name = serializers.ReadOnlyField(source="client.name")
|
client_name = serializers.ReadOnlyField(source="client.name")
|
||||||
site_name = serializers.ReadOnlyField(source="site.name")
|
site_name = serializers.ReadOnlyField(source="site.name")
|
||||||
|
logged_username = serializers.SerializerMethodField()
|
||||||
|
italic = serializers.SerializerMethodField()
|
||||||
|
policy = serializers.ReadOnlyField(source="policy.id")
|
||||||
|
alert_template = serializers.SerializerMethodField()
|
||||||
|
|
||||||
|
def get_alert_template(self, obj):
|
||||||
|
alert_template = obj.get_alert_template()
|
||||||
|
|
||||||
|
if not alert_template:
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
return {
|
||||||
|
"name": alert_template.name,
|
||||||
|
"always_email": alert_template.agent_always_email,
|
||||||
|
"always_text": alert_template.agent_always_text,
|
||||||
|
"always_alert": alert_template.agent_always_alert,
|
||||||
|
}
|
||||||
|
|
||||||
def get_pending_actions(self, obj):
|
def get_pending_actions(self, obj):
|
||||||
return obj.pendingactions.filter(status="pending").count()
|
return obj.pendingactions.filter(status="pending").count()
|
||||||
|
|
||||||
def get_last_seen(self, obj):
|
def get_last_seen(self, obj) -> str:
|
||||||
if obj.time_zone is not None:
|
if obj.time_zone is not None:
|
||||||
agent_tz = pytz.timezone(obj.time_zone)
|
agent_tz = pytz.timezone(obj.time_zone)
|
||||||
else:
|
else:
|
||||||
agent_tz = self.context["default_tz"]
|
agent_tz = self.context["default_tz"]
|
||||||
|
|
||||||
return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M:%S")
|
return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M")
|
||||||
|
|
||||||
|
def get_logged_username(self, obj) -> str:
|
||||||
|
if obj.logged_in_username == "None" and obj.status == "online":
|
||||||
|
return obj.last_logged_in_user
|
||||||
|
elif obj.logged_in_username != "None":
|
||||||
|
return obj.logged_in_username
|
||||||
|
else:
|
||||||
|
return "-"
|
||||||
|
|
||||||
|
def get_italic(self, obj) -> bool:
|
||||||
|
return obj.logged_in_username == "None" and obj.status == "online"
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Agent
|
model = Agent
|
||||||
fields = [
|
fields = [
|
||||||
"id",
|
"id",
|
||||||
|
"alert_template",
|
||||||
"hostname",
|
"hostname",
|
||||||
"agent_id",
|
"agent_id",
|
||||||
"site_name",
|
"site_name",
|
||||||
@@ -70,12 +108,14 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
|||||||
"status",
|
"status",
|
||||||
"overdue_text_alert",
|
"overdue_text_alert",
|
||||||
"overdue_email_alert",
|
"overdue_email_alert",
|
||||||
|
"overdue_dashboard_alert",
|
||||||
"last_seen",
|
"last_seen",
|
||||||
"boot_time",
|
"boot_time",
|
||||||
"checks",
|
"checks",
|
||||||
"logged_in_username",
|
|
||||||
"last_logged_in_user",
|
|
||||||
"maintenance_mode",
|
"maintenance_mode",
|
||||||
|
"logged_username",
|
||||||
|
"italic",
|
||||||
|
"policy",
|
||||||
]
|
]
|
||||||
depth = 2
|
depth = 2
|
||||||
|
|
||||||
@@ -101,10 +141,12 @@ class AgentEditSerializer(serializers.ModelSerializer):
|
|||||||
"timezone",
|
"timezone",
|
||||||
"check_interval",
|
"check_interval",
|
||||||
"overdue_time",
|
"overdue_time",
|
||||||
|
"offline_time",
|
||||||
"overdue_text_alert",
|
"overdue_text_alert",
|
||||||
"overdue_email_alert",
|
"overdue_email_alert",
|
||||||
"all_timezones",
|
"all_timezones",
|
||||||
"winupdatepolicy",
|
"winupdatepolicy",
|
||||||
|
"policy",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,119 +1,89 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
from loguru import logger
|
import datetime as dt
|
||||||
from time import sleep
|
|
||||||
import random
|
import random
|
||||||
import requests
|
from time import sleep
|
||||||
from packaging import version as pyver
|
from typing import Union
|
||||||
from typing import List
|
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from scripts.models import Script
|
from django.utils import timezone as djangotime
|
||||||
|
from loguru import logger
|
||||||
|
from packaging import version as pyver
|
||||||
|
|
||||||
from tacticalrmm.celery import app
|
from agents.models import Agent
|
||||||
from agents.models import Agent, AgentOutage
|
|
||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
from logs.models import PendingAction
|
from logs.models import PendingAction
|
||||||
|
from scripts.models import Script
|
||||||
|
from tacticalrmm.celery import app
|
||||||
|
|
||||||
logger.configure(**settings.LOG_CONFIG)
|
logger.configure(**settings.LOG_CONFIG)
|
||||||
|
|
||||||
|
|
||||||
def _check_agent_service(pk: int) -> None:
|
|
||||||
agent = Agent.objects.get(pk=pk)
|
|
||||||
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=2))
|
|
||||||
if r == "pong":
|
|
||||||
logger.info(
|
|
||||||
f"Detected crashed tacticalagent service on {agent.hostname}, attempting recovery"
|
|
||||||
)
|
|
||||||
data = {"func": "recover", "payload": {"mode": "tacagent"}}
|
|
||||||
asyncio.run(agent.nats_cmd(data, wait=False))
|
|
||||||
|
|
||||||
|
|
||||||
def _check_in_full(pk: int) -> None:
|
|
||||||
agent = Agent.objects.get(pk=pk)
|
|
||||||
asyncio.run(agent.nats_cmd({"func": "checkinfull"}, wait=False))
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
|
||||||
def check_in_task() -> None:
|
|
||||||
q = Agent.objects.only("pk", "version")
|
|
||||||
agents: List[int] = [
|
|
||||||
i.pk for i in q if pyver.parse(i.version) >= pyver.parse("1.1.12")
|
|
||||||
]
|
|
||||||
chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
|
|
||||||
for chunk in chunks:
|
|
||||||
for pk in chunk:
|
|
||||||
_check_in_full(pk)
|
|
||||||
sleep(0.1)
|
|
||||||
rand = random.randint(3, 7)
|
|
||||||
sleep(rand)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
|
||||||
def monitor_agents_task() -> None:
|
|
||||||
q = Agent.objects.all()
|
|
||||||
agents: List[int] = [i.pk for i in q if i.has_nats and i.status != "online"]
|
|
||||||
for agent in agents:
|
|
||||||
_check_agent_service(agent)
|
|
||||||
|
|
||||||
|
|
||||||
def agent_update(pk: int) -> str:
|
def agent_update(pk: int) -> str:
|
||||||
agent = Agent.objects.get(pk=pk)
|
agent = Agent.objects.get(pk=pk)
|
||||||
|
|
||||||
|
if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
|
||||||
|
logger.warning(
|
||||||
|
f"{agent.hostname} v{agent.version} is running an unsupported version. Refusing to auto update."
|
||||||
|
)
|
||||||
|
return "not supported"
|
||||||
|
|
||||||
# skip if we can't determine the arch
|
# skip if we can't determine the arch
|
||||||
if agent.arch is None:
|
if agent.arch is None:
|
||||||
logger.warning(f"Unable to determine arch on {agent.hostname}. Skipping.")
|
logger.warning(
|
||||||
|
f"Unable to determine arch on {agent.hostname}. Skipping agent update."
|
||||||
|
)
|
||||||
return "noarch"
|
return "noarch"
|
||||||
|
|
||||||
version = settings.LATEST_AGENT_VER
|
# removed sqlite in 1.4.0 to get rid of cgo dependency
|
||||||
url = agent.winagent_dl
|
# 1.3.0 has migration func to move from sqlite to win registry, so force an upgrade to 1.3.0 if old agent
|
||||||
inno = agent.win_inno_exe
|
if pyver.parse(agent.version) >= pyver.parse("1.3.0"):
|
||||||
|
version = settings.LATEST_AGENT_VER
|
||||||
|
url = agent.winagent_dl
|
||||||
|
inno = agent.win_inno_exe
|
||||||
|
else:
|
||||||
|
version = "1.3.0"
|
||||||
|
inno = (
|
||||||
|
"winagent-v1.3.0.exe" if agent.arch == "64" else "winagent-v1.3.0-x86.exe"
|
||||||
|
)
|
||||||
|
url = f"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/{inno}"
|
||||||
|
|
||||||
if agent.has_nats:
|
if agent.pendingactions.filter(
|
||||||
if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
|
action_type="agentupdate", status="pending"
|
||||||
if agent.pendingactions.filter(
|
).exists():
|
||||||
action_type="agentupdate", status="pending"
|
agent.pendingactions.filter(
|
||||||
).exists():
|
action_type="agentupdate", status="pending"
|
||||||
action = agent.pendingactions.filter(
|
).delete()
|
||||||
action_type="agentupdate", status="pending"
|
|
||||||
).last()
|
|
||||||
if pyver.parse(action.details["version"]) < pyver.parse(version):
|
|
||||||
action.delete()
|
|
||||||
else:
|
|
||||||
return "pending"
|
|
||||||
|
|
||||||
PendingAction.objects.create(
|
PendingAction.objects.create(
|
||||||
agent=agent,
|
agent=agent,
|
||||||
action_type="agentupdate",
|
action_type="agentupdate",
|
||||||
details={
|
details={
|
||||||
"url": url,
|
"url": url,
|
||||||
"version": version,
|
"version": version,
|
||||||
"inno": inno,
|
"inno": inno,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
else:
|
|
||||||
nats_data = {
|
|
||||||
"func": "agentupdate",
|
|
||||||
"payload": {
|
|
||||||
"url": url,
|
|
||||||
"version": version,
|
|
||||||
"inno": inno,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
|
||||||
|
|
||||||
return "created"
|
nats_data = {
|
||||||
|
"func": "agentupdate",
|
||||||
return "not supported"
|
"payload": {
|
||||||
|
"url": url,
|
||||||
|
"version": version,
|
||||||
|
"inno": inno,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||||
|
return "created"
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def send_agent_update_task(pks: List[int], version: str) -> None:
|
def send_agent_update_task(pks: list[int]) -> None:
|
||||||
q = Agent.objects.filter(pk__in=pks)
|
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
|
||||||
agents: List[int] = [
|
for chunk in chunks:
|
||||||
i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)
|
for pk in chunk:
|
||||||
]
|
agent_update(pk)
|
||||||
|
sleep(0.05)
|
||||||
for pk in agents:
|
sleep(4)
|
||||||
agent_update(pk)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
@@ -123,200 +93,140 @@ def auto_self_agent_update_task() -> None:
|
|||||||
return
|
return
|
||||||
|
|
||||||
q = Agent.objects.only("pk", "version")
|
q = Agent.objects.only("pk", "version")
|
||||||
pks: List[int] = [
|
pks: list[int] = [
|
||||||
i.pk
|
i.pk
|
||||||
for i in q
|
for i in q
|
||||||
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||||
]
|
]
|
||||||
|
|
||||||
for pk in pks:
|
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
|
||||||
agent_update(pk)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
|
||||||
def sync_sysinfo_task():
|
|
||||||
agents = Agent.objects.all()
|
|
||||||
online = [
|
|
||||||
i
|
|
||||||
for i in agents
|
|
||||||
if pyver.parse(i.version) >= pyver.parse("1.1.3") and i.status == "online"
|
|
||||||
]
|
|
||||||
|
|
||||||
chunks = (online[i : i + 50] for i in range(0, len(online), 50))
|
|
||||||
for chunk in chunks:
|
for chunk in chunks:
|
||||||
for agent in chunk:
|
for pk in chunk:
|
||||||
asyncio.run(agent.nats_cmd({"func": "sync"}, wait=False))
|
agent_update(pk)
|
||||||
sleep(0.1)
|
sleep(0.05)
|
||||||
rand = random.randint(3, 7)
|
sleep(4)
|
||||||
sleep(rand)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def sync_salt_modules_task(pk):
|
def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None) -> str:
|
||||||
agent = Agent.objects.get(pk=pk)
|
from alerts.models import Alert
|
||||||
r = agent.salt_api_cmd(timeout=35, func="saltutil.sync_modules")
|
|
||||||
# successful sync if new/charnged files: {'return': [{'MINION-15': ['modules.get_eventlog', 'modules.win_agent', 'etc...']}]}
|
|
||||||
# successful sync with no new/changed files: {'return': [{'MINION-15': []}]}
|
|
||||||
if r == "timeout" or r == "error":
|
|
||||||
return f"Unable to sync modules {agent.salt_id}"
|
|
||||||
|
|
||||||
return f"Successfully synced salt modules on {agent.hostname}"
|
alert = Alert.objects.get(pk=pk)
|
||||||
|
|
||||||
|
if not alert.email_sent:
|
||||||
@app.task
|
sleep(random.randint(1, 15))
|
||||||
def batch_sync_modules_task():
|
alert.agent.send_outage_email()
|
||||||
# sync modules, split into chunks of 50 agents to not overload salt
|
alert.email_sent = djangotime.now()
|
||||||
agents = Agent.objects.all()
|
alert.save(update_fields=["email_sent"])
|
||||||
online = [i.salt_id for i in agents]
|
|
||||||
chunks = (online[i : i + 50] for i in range(0, len(online), 50))
|
|
||||||
for chunk in chunks:
|
|
||||||
Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
|
|
||||||
sleep(10)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
|
||||||
def uninstall_agent_task(salt_id, has_nats):
|
|
||||||
attempts = 0
|
|
||||||
error = False
|
|
||||||
|
|
||||||
if not has_nats:
|
|
||||||
while 1:
|
|
||||||
try:
|
|
||||||
|
|
||||||
r = requests.post(
|
|
||||||
f"http://{settings.SALT_HOST}:8123/run",
|
|
||||||
json=[
|
|
||||||
{
|
|
||||||
"client": "local",
|
|
||||||
"tgt": salt_id,
|
|
||||||
"fun": "win_agent.uninstall_agent",
|
|
||||||
"timeout": 8,
|
|
||||||
"username": settings.SALT_USERNAME,
|
|
||||||
"password": settings.SALT_PASSWORD,
|
|
||||||
"eauth": "pam",
|
|
||||||
}
|
|
||||||
],
|
|
||||||
timeout=10,
|
|
||||||
)
|
|
||||||
ret = r.json()["return"][0][salt_id]
|
|
||||||
except Exception:
|
|
||||||
attempts += 1
|
|
||||||
else:
|
|
||||||
if ret != "ok":
|
|
||||||
attempts += 1
|
|
||||||
else:
|
|
||||||
attempts = 0
|
|
||||||
|
|
||||||
if attempts >= 10:
|
|
||||||
error = True
|
|
||||||
break
|
|
||||||
elif attempts == 0:
|
|
||||||
break
|
|
||||||
|
|
||||||
if error:
|
|
||||||
logger.error(f"{salt_id} uninstall failed")
|
|
||||||
else:
|
else:
|
||||||
logger.info(f"{salt_id} was successfully uninstalled")
|
if alert_interval:
|
||||||
|
# send an email only if the last email sent is older than alert interval
|
||||||
try:
|
delta = djangotime.now() - dt.timedelta(days=alert_interval)
|
||||||
r = requests.post(
|
if alert.email_sent < delta:
|
||||||
f"http://{settings.SALT_HOST}:8123/run",
|
sleep(random.randint(1, 10))
|
||||||
json=[
|
alert.agent.send_outage_email()
|
||||||
{
|
alert.email_sent = djangotime.now()
|
||||||
"client": "wheel",
|
alert.save(update_fields=["email_sent"])
|
||||||
"fun": "key.delete",
|
|
||||||
"match": salt_id,
|
|
||||||
"username": settings.SALT_USERNAME,
|
|
||||||
"password": settings.SALT_PASSWORD,
|
|
||||||
"eauth": "pam",
|
|
||||||
}
|
|
||||||
],
|
|
||||||
timeout=30,
|
|
||||||
)
|
|
||||||
except Exception:
|
|
||||||
logger.error(f"{salt_id} unable to remove salt-key")
|
|
||||||
|
|
||||||
return "ok"
|
return "ok"
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def agent_outage_email_task(pk):
|
def agent_recovery_email_task(pk: int) -> str:
|
||||||
|
from alerts.models import Alert
|
||||||
|
|
||||||
sleep(random.randint(1, 15))
|
sleep(random.randint(1, 15))
|
||||||
outage = AgentOutage.objects.get(pk=pk)
|
alert = Alert.objects.get(pk=pk)
|
||||||
outage.send_outage_email()
|
alert.agent.send_recovery_email()
|
||||||
outage.outage_email_sent = True
|
alert.resolved_email_sent = djangotime.now()
|
||||||
outage.save(update_fields=["outage_email_sent"])
|
alert.save(update_fields=["resolved_email_sent"])
|
||||||
|
|
||||||
|
return "ok"
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def agent_recovery_email_task(pk):
|
def agent_outage_sms_task(pk: int, alert_interval: Union[float, None] = None) -> str:
|
||||||
sleep(random.randint(1, 15))
|
from alerts.models import Alert
|
||||||
outage = AgentOutage.objects.get(pk=pk)
|
|
||||||
outage.send_recovery_email()
|
alert = Alert.objects.get(pk=pk)
|
||||||
outage.recovery_email_sent = True
|
|
||||||
outage.save(update_fields=["recovery_email_sent"])
|
if not alert.sms_sent:
|
||||||
|
sleep(random.randint(1, 15))
|
||||||
|
alert.agent.send_outage_sms()
|
||||||
|
alert.sms_sent = djangotime.now()
|
||||||
|
alert.save(update_fields=["sms_sent"])
|
||||||
|
else:
|
||||||
|
if alert_interval:
|
||||||
|
# send an sms only if the last sms sent is older than alert interval
|
||||||
|
delta = djangotime.now() - dt.timedelta(days=alert_interval)
|
||||||
|
if alert.sms_sent < delta:
|
||||||
|
sleep(random.randint(1, 10))
|
||||||
|
alert.agent.send_outage_sms()
|
||||||
|
alert.sms_sent = djangotime.now()
|
||||||
|
alert.save(update_fields=["sms_sent"])
|
||||||
|
|
||||||
|
return "ok"
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def agent_outage_sms_task(pk):
|
def agent_recovery_sms_task(pk: int) -> str:
|
||||||
|
from alerts.models import Alert
|
||||||
|
|
||||||
sleep(random.randint(1, 3))
|
sleep(random.randint(1, 3))
|
||||||
outage = AgentOutage.objects.get(pk=pk)
|
alert = Alert.objects.get(pk=pk)
|
||||||
outage.send_outage_sms()
|
alert.agent.send_recovery_sms()
|
||||||
outage.outage_sms_sent = True
|
alert.resolved_sms_sent = djangotime.now()
|
||||||
outage.save(update_fields=["outage_sms_sent"])
|
alert.save(update_fields=["resolved_sms_sent"])
|
||||||
|
|
||||||
|
return "ok"
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def agent_recovery_sms_task(pk):
|
def agent_outages_task() -> None:
|
||||||
sleep(random.randint(1, 3))
|
from alerts.models import Alert
|
||||||
outage = AgentOutage.objects.get(pk=pk)
|
|
||||||
outage.send_recovery_sms()
|
|
||||||
outage.recovery_sms_sent = True
|
|
||||||
outage.save(update_fields=["recovery_sms_sent"])
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
|
||||||
def agent_outages_task():
|
|
||||||
agents = Agent.objects.only(
|
agents = Agent.objects.only(
|
||||||
"pk", "last_seen", "overdue_time", "overdue_email_alert", "overdue_text_alert"
|
"pk",
|
||||||
|
"last_seen",
|
||||||
|
"offline_time",
|
||||||
|
"overdue_time",
|
||||||
|
"overdue_email_alert",
|
||||||
|
"overdue_text_alert",
|
||||||
|
"overdue_dashboard_alert",
|
||||||
)
|
)
|
||||||
|
|
||||||
for agent in agents:
|
for agent in agents:
|
||||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
if agent.status == "overdue":
|
||||||
if agent.status == "overdue":
|
Alert.handle_alert_failure(agent)
|
||||||
outages = AgentOutage.objects.filter(agent=agent)
|
|
||||||
if outages and outages.last().is_active:
|
|
||||||
continue
|
|
||||||
|
|
||||||
outage = AgentOutage(agent=agent)
|
|
||||||
outage.save()
|
|
||||||
|
|
||||||
# add a null check history to allow gaps in graph
|
|
||||||
for check in agent.agentchecks.all():
|
|
||||||
check.add_check_history(None)
|
|
||||||
|
|
||||||
if agent.overdue_email_alert and not agent.maintenance_mode:
|
|
||||||
agent_outage_email_task.delay(pk=outage.pk)
|
|
||||||
|
|
||||||
if agent.overdue_text_alert and not agent.maintenance_mode:
|
|
||||||
agent_outage_sms_task.delay(pk=outage.pk)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def install_salt_task(pk: int) -> None:
|
def handle_agent_recovery_task(pk: int) -> None:
|
||||||
sleep(20)
|
sleep(10)
|
||||||
agent = Agent.objects.get(pk=pk)
|
from agents.models import RecoveryAction
|
||||||
asyncio.run(agent.nats_cmd({"func": "installsalt"}, wait=False))
|
|
||||||
|
action = RecoveryAction.objects.get(pk=pk)
|
||||||
|
if action.mode == "command":
|
||||||
|
data = {"func": "recoverycmd", "recoverycommand": action.command}
|
||||||
|
else:
|
||||||
|
data = {"func": "recover", "payload": {"mode": action.mode}}
|
||||||
|
|
||||||
|
asyncio.run(action.agent.nats_cmd(data, wait=False))
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def run_script_email_results_task(
|
def run_script_email_results_task(
|
||||||
agentpk: int, scriptpk: int, nats_timeout: int, nats_data: dict, emails: List[str]
|
agentpk: int,
|
||||||
|
scriptpk: int,
|
||||||
|
nats_timeout: int,
|
||||||
|
emails: list[str],
|
||||||
|
args: list[str] = [],
|
||||||
):
|
):
|
||||||
agent = Agent.objects.get(pk=agentpk)
|
agent = Agent.objects.get(pk=agentpk)
|
||||||
script = Script.objects.get(pk=scriptpk)
|
script = Script.objects.get(pk=scriptpk)
|
||||||
nats_data["func"] = "runscriptfull"
|
r = agent.run_script(
|
||||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=nats_timeout))
|
scriptpk=script.pk, args=args, full=True, timeout=nats_timeout, wait=True
|
||||||
|
)
|
||||||
if r == "timeout":
|
if r == "timeout":
|
||||||
logger.error(f"{agent.hostname} timed out running script.")
|
logger.error(f"{agent.hostname} timed out running script.")
|
||||||
return
|
return
|
||||||
|
|||||||
@@ -1,26 +1,121 @@
|
|||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
from itertools import cycle
|
||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
|
|
||||||
from model_bakery import baker
|
|
||||||
from itertools import cycle
|
|
||||||
|
|
||||||
from django.test import TestCase, override_settings
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.utils import timezone as djangotime
|
from model_bakery import baker
|
||||||
from logs.models import PendingAction
|
from packaging import version as pyver
|
||||||
|
|
||||||
|
from logs.models import PendingAction
|
||||||
from tacticalrmm.test import TacticalTestCase
|
from tacticalrmm.test import TacticalTestCase
|
||||||
from .serializers import AgentSerializer
|
|
||||||
from winupdate.serializers import WinUpdatePolicySerializer
|
|
||||||
from .models import Agent
|
|
||||||
from .tasks import (
|
|
||||||
agent_recovery_sms_task,
|
|
||||||
auto_self_agent_update_task,
|
|
||||||
sync_salt_modules_task,
|
|
||||||
batch_sync_modules_task,
|
|
||||||
)
|
|
||||||
from winupdate.models import WinUpdatePolicy
|
from winupdate.models import WinUpdatePolicy
|
||||||
|
from winupdate.serializers import WinUpdatePolicySerializer
|
||||||
|
|
||||||
|
from .models import Agent
|
||||||
|
from .serializers import AgentSerializer
|
||||||
|
from .tasks import auto_self_agent_update_task
|
||||||
|
|
||||||
|
|
||||||
|
class TestAgentsList(TacticalTestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.authenticate()
|
||||||
|
self.setup_coresettings()
|
||||||
|
|
||||||
|
def test_agents_list(self):
|
||||||
|
url = "/agents/listagents/"
|
||||||
|
|
||||||
|
# 36 total agents
|
||||||
|
company1 = baker.make("clients.Client")
|
||||||
|
company2 = baker.make("clients.Client")
|
||||||
|
site1 = baker.make("clients.Site", client=company1)
|
||||||
|
site2 = baker.make("clients.Site", client=company1)
|
||||||
|
site3 = baker.make("clients.Site", client=company2)
|
||||||
|
|
||||||
|
baker.make_recipe(
|
||||||
|
"agents.online_agent", site=site1, monitoring_type="server", _quantity=15
|
||||||
|
)
|
||||||
|
baker.make_recipe(
|
||||||
|
"agents.online_agent",
|
||||||
|
site=site2,
|
||||||
|
monitoring_type="workstation",
|
||||||
|
_quantity=10,
|
||||||
|
)
|
||||||
|
baker.make_recipe(
|
||||||
|
"agents.online_agent",
|
||||||
|
site=site3,
|
||||||
|
monitoring_type="server",
|
||||||
|
_quantity=4,
|
||||||
|
)
|
||||||
|
baker.make_recipe(
|
||||||
|
"agents.online_agent",
|
||||||
|
site=site3,
|
||||||
|
monitoring_type="workstation",
|
||||||
|
_quantity=7,
|
||||||
|
)
|
||||||
|
|
||||||
|
data = {
|
||||||
|
"pagination": {
|
||||||
|
"rowsPerPage": 50,
|
||||||
|
"rowsNumber": None,
|
||||||
|
"sortBy": "hostname",
|
||||||
|
"descending": False,
|
||||||
|
"page": 1,
|
||||||
|
},
|
||||||
|
"monType": "mixed",
|
||||||
|
}
|
||||||
|
|
||||||
|
# test mixed
|
||||||
|
r = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual(r.data["total"], 36) # type: ignore
|
||||||
|
self.assertEqual(len(r.data["agents"]), 36) # type: ignore
|
||||||
|
|
||||||
|
# test servers
|
||||||
|
data["monType"] = "server"
|
||||||
|
data["pagination"]["rowsPerPage"] = 6
|
||||||
|
r = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual(r.data["total"], 19) # type: ignore
|
||||||
|
self.assertEqual(len(r.data["agents"]), 6) # type: ignore
|
||||||
|
|
||||||
|
# test workstations
|
||||||
|
data["monType"] = "server"
|
||||||
|
data["pagination"]["rowsPerPage"] = 6
|
||||||
|
r = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual(r.data["total"], 19) # type: ignore
|
||||||
|
self.assertEqual(len(r.data["agents"]), 6) # type: ignore
|
||||||
|
|
||||||
|
# test client1 mixed
|
||||||
|
data = {
|
||||||
|
"pagination": {
|
||||||
|
"rowsPerPage": 3,
|
||||||
|
"rowsNumber": None,
|
||||||
|
"sortBy": "hostname",
|
||||||
|
"descending": False,
|
||||||
|
"page": 1,
|
||||||
|
},
|
||||||
|
"monType": "mixed",
|
||||||
|
"clientPK": company1.pk, # type: ignore
|
||||||
|
}
|
||||||
|
|
||||||
|
r = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual(r.data["total"], 25) # type: ignore
|
||||||
|
self.assertEqual(len(r.data["agents"]), 3) # type: ignore
|
||||||
|
|
||||||
|
# test site3 workstations
|
||||||
|
del data["clientPK"]
|
||||||
|
data["monType"] = "workstation"
|
||||||
|
data["sitePK"] = site3.pk # type: ignore
|
||||||
|
|
||||||
|
r = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual(r.data["total"], 7) # type: ignore
|
||||||
|
self.assertEqual(len(r.data["agents"]), 3) # type: ignore
|
||||||
|
|
||||||
|
self.check_not_authenticated("patch", url)
|
||||||
|
|
||||||
|
|
||||||
class TestAgentViews(TacticalTestCase):
|
class TestAgentViews(TacticalTestCase):
|
||||||
@@ -70,12 +165,34 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
@patch("agents.tasks.send_agent_update_task.delay")
|
@patch("agents.tasks.send_agent_update_task.delay")
|
||||||
def test_update_agents(self, mock_task):
|
def test_update_agents(self, mock_task):
|
||||||
url = "/agents/updateagents/"
|
url = "/agents/updateagents/"
|
||||||
data = {"pks": [1, 2, 3, 5, 10], "version": "0.11.1"}
|
baker.make_recipe(
|
||||||
|
"agents.agent",
|
||||||
|
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||||
|
version=settings.LATEST_AGENT_VER,
|
||||||
|
_quantity=15,
|
||||||
|
)
|
||||||
|
baker.make_recipe(
|
||||||
|
"agents.agent",
|
||||||
|
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||||
|
version="1.3.0",
|
||||||
|
_quantity=15,
|
||||||
|
)
|
||||||
|
|
||||||
|
pks: list[int] = list(
|
||||||
|
Agent.objects.only("pk", "version").values_list("pk", flat=True)
|
||||||
|
)
|
||||||
|
|
||||||
|
data = {"pks": pks}
|
||||||
|
expected: list[int] = [
|
||||||
|
i.pk
|
||||||
|
for i in Agent.objects.only("pk", "version")
|
||||||
|
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||||
|
]
|
||||||
|
|
||||||
r = self.client.post(url, data, format="json")
|
r = self.client.post(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
mock_task.assert_called_with(pks=data["pks"], version=data["version"])
|
mock_task.assert_called_with(pks=expected)
|
||||||
|
|
||||||
self.check_not_authenticated("post", url)
|
self.check_not_authenticated("post", url)
|
||||||
|
|
||||||
@@ -110,9 +227,8 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
self.check_not_authenticated("get", url)
|
self.check_not_authenticated("get", url)
|
||||||
|
|
||||||
@patch("agents.models.Agent.nats_cmd")
|
@patch("agents.models.Agent.nats_cmd")
|
||||||
@patch("agents.tasks.uninstall_agent_task.delay")
|
|
||||||
@patch("agents.views.reload_nats")
|
@patch("agents.views.reload_nats")
|
||||||
def test_uninstall(self, reload_nats, mock_task, nats_cmd):
|
def test_uninstall(self, reload_nats, nats_cmd):
|
||||||
url = "/agents/uninstall/"
|
url = "/agents/uninstall/"
|
||||||
data = {"pk": self.agent.pk}
|
data = {"pk": self.agent.pk}
|
||||||
|
|
||||||
@@ -121,13 +237,18 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
nats_cmd.assert_called_with({"func": "uninstall"}, wait=False)
|
nats_cmd.assert_called_with({"func": "uninstall"}, wait=False)
|
||||||
reload_nats.assert_called_once()
|
reload_nats.assert_called_once()
|
||||||
mock_task.assert_called_with(self.agent.salt_id, True)
|
|
||||||
|
|
||||||
self.check_not_authenticated("delete", url)
|
self.check_not_authenticated("delete", url)
|
||||||
|
|
||||||
@patch("agents.models.Agent.nats_cmd")
|
@patch("agents.models.Agent.nats_cmd")
|
||||||
def test_get_processes(self, mock_ret):
|
def test_get_processes(self, mock_ret):
|
||||||
url = f"/agents/{self.agent.pk}/getprocs/"
|
agent_old = baker.make_recipe("agents.online_agent", version="1.1.12")
|
||||||
|
url_old = f"/agents/{agent_old.pk}/getprocs/"
|
||||||
|
r = self.client.get(url_old)
|
||||||
|
self.assertEqual(r.status_code, 400)
|
||||||
|
|
||||||
|
agent = baker.make_recipe("agents.online_agent", version="1.2.0")
|
||||||
|
url = f"/agents/{agent.pk}/getprocs/"
|
||||||
|
|
||||||
with open(
|
with open(
|
||||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/procs.json")
|
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/procs.json")
|
||||||
@@ -137,9 +258,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
r = self.client.get(url)
|
r = self.client.get(url)
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
assert any(i["name"] == "Registry" for i in mock_ret.return_value)
|
assert any(i["name"] == "Registry" for i in mock_ret.return_value)
|
||||||
assert any(
|
assert any(i["membytes"] == 434655234324 for i in mock_ret.return_value)
|
||||||
i["memory_percent"] == 0.004843281375620747 for i in mock_ret.return_value
|
|
||||||
)
|
|
||||||
|
|
||||||
mock_ret.return_value = "timeout"
|
mock_ret.return_value = "timeout"
|
||||||
r = self.client.get(url)
|
r = self.client.get(url)
|
||||||
@@ -166,18 +285,44 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
self.check_not_authenticated("get", url)
|
self.check_not_authenticated("get", url)
|
||||||
|
|
||||||
@patch("agents.models.Agent.nats_cmd")
|
@patch("agents.models.Agent.nats_cmd")
|
||||||
def test_get_event_log(self, mock_ret):
|
def test_get_event_log(self, nats_cmd):
|
||||||
url = f"/agents/{self.agent.pk}/geteventlog/Application/30/"
|
url = f"/agents/{self.agent.pk}/geteventlog/Application/22/"
|
||||||
|
|
||||||
with open(
|
with open(
|
||||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/appeventlog.json")
|
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/appeventlog.json")
|
||||||
) as f:
|
) as f:
|
||||||
mock_ret.return_value = json.load(f)
|
nats_cmd.return_value = json.load(f)
|
||||||
|
|
||||||
r = self.client.get(url)
|
r = self.client.get(url)
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
|
nats_cmd.assert_called_with(
|
||||||
|
{
|
||||||
|
"func": "eventlog",
|
||||||
|
"timeout": 30,
|
||||||
|
"payload": {
|
||||||
|
"logname": "Application",
|
||||||
|
"days": str(22),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
timeout=32,
|
||||||
|
)
|
||||||
|
|
||||||
mock_ret.return_value = "timeout"
|
url = f"/agents/{self.agent.pk}/geteventlog/Security/6/"
|
||||||
|
r = self.client.get(url)
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
nats_cmd.assert_called_with(
|
||||||
|
{
|
||||||
|
"func": "eventlog",
|
||||||
|
"timeout": 180,
|
||||||
|
"payload": {
|
||||||
|
"logname": "Security",
|
||||||
|
"days": str(6),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
timeout=182,
|
||||||
|
)
|
||||||
|
|
||||||
|
nats_cmd.return_value = "timeout"
|
||||||
r = self.client.get(url)
|
r = self.client.get(url)
|
||||||
self.assertEqual(r.status_code, 400)
|
self.assertEqual(r.status_code, 400)
|
||||||
|
|
||||||
@@ -212,7 +357,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
mock_ret.return_value = "nt authority\system"
|
mock_ret.return_value = "nt authority\system"
|
||||||
r = self.client.post(url, data, format="json")
|
r = self.client.post(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertIsInstance(r.data, str)
|
self.assertIsInstance(r.data, str) # type: ignore
|
||||||
|
|
||||||
mock_ret.return_value = "timeout"
|
mock_ret.return_value = "timeout"
|
||||||
r = self.client.post(url, data, format="json")
|
r = self.client.post(url, data, format="json")
|
||||||
@@ -232,15 +377,15 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
nats_cmd.return_value = "ok"
|
nats_cmd.return_value = "ok"
|
||||||
r = self.client.patch(url, data, format="json")
|
r = self.client.patch(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")
|
self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM") # type: ignore
|
||||||
self.assertEqual(r.data["agent"], self.agent.hostname)
|
self.assertEqual(r.data["agent"], self.agent.hostname) # type: ignore
|
||||||
|
|
||||||
nats_data = {
|
nats_data = {
|
||||||
"func": "schedtask",
|
"func": "schedtask",
|
||||||
"schedtaskpayload": {
|
"schedtaskpayload": {
|
||||||
"type": "schedreboot",
|
"type": "schedreboot",
|
||||||
"trigger": "once",
|
"trigger": "once",
|
||||||
"name": r.data["task_name"],
|
"name": r.data["task_name"], # type: ignore
|
||||||
"year": 2025,
|
"year": 2025,
|
||||||
"month": "August",
|
"month": "August",
|
||||||
"day": 29,
|
"day": 29,
|
||||||
@@ -261,7 +406,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
r = self.client.patch(url, data_invalid, format="json")
|
r = self.client.patch(url, data_invalid, format="json")
|
||||||
|
|
||||||
self.assertEqual(r.status_code, 400)
|
self.assertEqual(r.status_code, 400)
|
||||||
self.assertEqual(r.data, "Invalid date")
|
self.assertEqual(r.data, "Invalid date") # type: ignore
|
||||||
|
|
||||||
self.check_not_authenticated("patch", url)
|
self.check_not_authenticated("patch", url)
|
||||||
|
|
||||||
@@ -272,8 +417,8 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
site = baker.make("clients.Site")
|
site = baker.make("clients.Site")
|
||||||
data = {
|
data = {
|
||||||
"client": site.client.id,
|
"client": site.client.id, # type: ignore
|
||||||
"site": site.id,
|
"site": site.id, # type: ignore
|
||||||
"arch": "64",
|
"arch": "64",
|
||||||
"expires": 23,
|
"expires": 23,
|
||||||
"installMethod": "exe",
|
"installMethod": "exe",
|
||||||
@@ -312,7 +457,6 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
r = self.client.post(url, data, format="json")
|
r = self.client.post(url, data, format="json")
|
||||||
self.assertIn("rdp", r.json()["cmd"])
|
self.assertIn("rdp", r.json()["cmd"])
|
||||||
self.assertNotIn("power", r.json()["cmd"])
|
self.assertNotIn("power", r.json()["cmd"])
|
||||||
self.assertNotIn("ping", r.json()["cmd"])
|
|
||||||
|
|
||||||
data.update({"ping": 1, "power": 1})
|
data.update({"ping": 1, "power": 1})
|
||||||
r = self.client.post(url, data, format="json")
|
r = self.client.post(url, data, format="json")
|
||||||
@@ -331,7 +475,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
r = self.client.post(url, data, format="json")
|
r = self.client.post(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
data["mode"] = "salt"
|
data["mode"] = "mesh"
|
||||||
r = self.client.post(url, data, format="json")
|
r = self.client.post(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 400)
|
self.assertEqual(r.status_code, 400)
|
||||||
self.assertIn("pending", r.json())
|
self.assertIn("pending", r.json())
|
||||||
@@ -351,21 +495,13 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
self.agent.version = "0.9.4"
|
self.agent.version = "0.9.4"
|
||||||
self.agent.save(update_fields=["version"])
|
self.agent.save(update_fields=["version"])
|
||||||
data["mode"] = "salt"
|
data["mode"] = "mesh"
|
||||||
r = self.client.post(url, data, format="json")
|
r = self.client.post(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 400)
|
self.assertEqual(r.status_code, 400)
|
||||||
self.assertIn("0.9.5", r.json())
|
self.assertIn("0.9.5", r.json())
|
||||||
|
|
||||||
self.check_not_authenticated("post", url)
|
self.check_not_authenticated("post", url)
|
||||||
|
|
||||||
def test_agents_list(self):
|
|
||||||
url = "/agents/listagents/"
|
|
||||||
|
|
||||||
r = self.client.get(url)
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
|
||||||
|
|
||||||
def test_agents_agent_detail(self):
|
def test_agents_agent_detail(self):
|
||||||
url = f"/agents/{self.agent.pk}/agentdetail/"
|
url = f"/agents/{self.agent.pk}/agentdetail/"
|
||||||
|
|
||||||
@@ -382,9 +518,10 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
edit = {
|
edit = {
|
||||||
"id": self.agent.pk,
|
"id": self.agent.pk,
|
||||||
"site": site.id,
|
"site": site.id, # type: ignore
|
||||||
"monitoring_type": "workstation",
|
"monitoring_type": "workstation",
|
||||||
"description": "asjdk234andasd",
|
"description": "asjdk234andasd",
|
||||||
|
"offline_time": 4,
|
||||||
"overdue_time": 300,
|
"overdue_time": 300,
|
||||||
"check_interval": 60,
|
"check_interval": 60,
|
||||||
"overdue_email_alert": True,
|
"overdue_email_alert": True,
|
||||||
@@ -412,7 +549,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
agent = Agent.objects.get(pk=self.agent.pk)
|
agent = Agent.objects.get(pk=self.agent.pk)
|
||||||
data = AgentSerializer(agent).data
|
data = AgentSerializer(agent).data
|
||||||
self.assertEqual(data["site"], site.id)
|
self.assertEqual(data["site"], site.id) # type: ignore
|
||||||
|
|
||||||
policy = WinUpdatePolicy.objects.get(agent=self.agent)
|
policy = WinUpdatePolicy.objects.get(agent=self.agent)
|
||||||
data = WinUpdatePolicySerializer(policy).data
|
data = WinUpdatePolicySerializer(policy).data
|
||||||
@@ -430,21 +567,21 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
# TODO
|
# TODO
|
||||||
# decode the cookie
|
# decode the cookie
|
||||||
|
|
||||||
self.assertIn("&viewmode=13", r.data["file"])
|
self.assertIn("&viewmode=13", r.data["file"]) # type: ignore
|
||||||
self.assertIn("&viewmode=12", r.data["terminal"])
|
self.assertIn("&viewmode=12", r.data["terminal"]) # type: ignore
|
||||||
self.assertIn("&viewmode=11", r.data["control"])
|
self.assertIn("&viewmode=11", r.data["control"]) # type: ignore
|
||||||
|
|
||||||
self.assertIn("&gotonode=", r.data["file"])
|
self.assertIn("&gotonode=", r.data["file"]) # type: ignore
|
||||||
self.assertIn("&gotonode=", r.data["terminal"])
|
self.assertIn("&gotonode=", r.data["terminal"]) # type: ignore
|
||||||
self.assertIn("&gotonode=", r.data["control"])
|
self.assertIn("&gotonode=", r.data["control"]) # type: ignore
|
||||||
|
|
||||||
self.assertIn("?login=", r.data["file"])
|
self.assertIn("?login=", r.data["file"]) # type: ignore
|
||||||
self.assertIn("?login=", r.data["terminal"])
|
self.assertIn("?login=", r.data["terminal"]) # type: ignore
|
||||||
self.assertIn("?login=", r.data["control"])
|
self.assertIn("?login=", r.data["control"]) # type: ignore
|
||||||
|
|
||||||
self.assertEqual(self.agent.hostname, r.data["hostname"])
|
self.assertEqual(self.agent.hostname, r.data["hostname"]) # type: ignore
|
||||||
self.assertEqual(self.agent.client.name, r.data["client"])
|
self.assertEqual(self.agent.client.name, r.data["client"]) # type: ignore
|
||||||
self.assertEqual(self.agent.site.name, r.data["site"])
|
self.assertEqual(self.agent.site.name, r.data["site"]) # type: ignore
|
||||||
|
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
@@ -454,70 +591,22 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
self.check_not_authenticated("get", url)
|
||||||
|
|
||||||
def test_by_client(self):
|
|
||||||
url = f"/agents/byclient/{self.agent.client.id}/"
|
|
||||||
|
|
||||||
r = self.client.get(url)
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
self.assertTrue(r.data)
|
|
||||||
|
|
||||||
url = f"/agents/byclient/500/"
|
|
||||||
r = self.client.get(url)
|
|
||||||
self.assertFalse(r.data) # returns empty list
|
|
||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
|
||||||
|
|
||||||
def test_by_site(self):
|
|
||||||
url = f"/agents/bysite/{self.agent.site.id}/"
|
|
||||||
|
|
||||||
r = self.client.get(url)
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
self.assertTrue(r.data)
|
|
||||||
|
|
||||||
url = f"/agents/bysite/500/"
|
|
||||||
r = self.client.get(url)
|
|
||||||
self.assertEqual(r.data, [])
|
|
||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
|
||||||
|
|
||||||
def test_overdue_action(self):
|
def test_overdue_action(self):
|
||||||
url = "/agents/overdueaction/"
|
url = "/agents/overdueaction/"
|
||||||
|
|
||||||
payload = {"pk": self.agent.pk, "alertType": "email", "action": "enabled"}
|
payload = {"pk": self.agent.pk, "overdue_email_alert": True}
|
||||||
r = self.client.post(url, payload, format="json")
|
r = self.client.post(url, payload, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
agent = Agent.objects.get(pk=self.agent.pk)
|
agent = Agent.objects.get(pk=self.agent.pk)
|
||||||
self.assertTrue(agent.overdue_email_alert)
|
self.assertTrue(agent.overdue_email_alert)
|
||||||
self.assertEqual(self.agent.hostname, r.data)
|
self.assertEqual(self.agent.hostname, r.data) # type: ignore
|
||||||
|
|
||||||
payload.update({"alertType": "email", "action": "disabled"})
|
payload = {"pk": self.agent.pk, "overdue_text_alert": False}
|
||||||
r = self.client.post(url, payload, format="json")
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
agent = Agent.objects.get(pk=self.agent.pk)
|
|
||||||
self.assertFalse(agent.overdue_email_alert)
|
|
||||||
self.assertEqual(self.agent.hostname, r.data)
|
|
||||||
|
|
||||||
payload.update({"alertType": "text", "action": "enabled"})
|
|
||||||
r = self.client.post(url, payload, format="json")
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
agent = Agent.objects.get(pk=self.agent.pk)
|
|
||||||
self.assertTrue(agent.overdue_text_alert)
|
|
||||||
self.assertEqual(self.agent.hostname, r.data)
|
|
||||||
|
|
||||||
payload.update({"alertType": "text", "action": "disabled"})
|
|
||||||
r = self.client.post(url, payload, format="json")
|
r = self.client.post(url, payload, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
agent = Agent.objects.get(pk=self.agent.pk)
|
agent = Agent.objects.get(pk=self.agent.pk)
|
||||||
self.assertFalse(agent.overdue_text_alert)
|
self.assertFalse(agent.overdue_text_alert)
|
||||||
self.assertEqual(self.agent.hostname, r.data)
|
self.assertEqual(self.agent.hostname, r.data) # type: ignore
|
||||||
|
|
||||||
payload.update({"alertType": "email", "action": "523423"})
|
|
||||||
r = self.client.post(url, payload, format="json")
|
|
||||||
self.assertEqual(r.status_code, 400)
|
|
||||||
|
|
||||||
payload.update({"alertType": "text", "action": "asdasd3434asdasd"})
|
|
||||||
r = self.client.post(url, payload, format="json")
|
|
||||||
self.assertEqual(r.status_code, 400)
|
|
||||||
|
|
||||||
self.check_not_authenticated("post", url)
|
self.check_not_authenticated("post", url)
|
||||||
|
|
||||||
@@ -539,7 +628,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
self.check_not_authenticated("get", url)
|
||||||
|
|
||||||
@patch("winupdate.tasks.bulk_check_for_updates_task.delay")
|
""" @patch("winupdate.tasks.bulk_check_for_updates_task.delay")
|
||||||
@patch("scripts.tasks.handle_bulk_script_task.delay")
|
@patch("scripts.tasks.handle_bulk_script_task.delay")
|
||||||
@patch("scripts.tasks.handle_bulk_command_task.delay")
|
@patch("scripts.tasks.handle_bulk_command_task.delay")
|
||||||
@patch("agents.models.Agent.salt_batch_async")
|
@patch("agents.models.Agent.salt_batch_async")
|
||||||
@@ -581,7 +670,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
r = self.client.post(url, payload, format="json")
|
r = self.client.post(url, payload, format="json")
|
||||||
self.assertEqual(r.status_code, 400)
|
self.assertEqual(r.status_code, 400)
|
||||||
|
|
||||||
""" payload = {
|
payload = {
|
||||||
"mode": "command",
|
"mode": "command",
|
||||||
"monType": "workstations",
|
"monType": "workstations",
|
||||||
"target": "client",
|
"target": "client",
|
||||||
@@ -595,7 +684,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
r = self.client.post(url, payload, format="json")
|
r = self.client.post(url, payload, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300) """
|
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
|
||||||
|
|
||||||
payload = {
|
payload = {
|
||||||
"mode": "command",
|
"mode": "command",
|
||||||
@@ -653,7 +742,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
# TODO mock the script
|
# TODO mock the script
|
||||||
|
|
||||||
self.check_not_authenticated("post", url)
|
self.check_not_authenticated("post", url) """
|
||||||
|
|
||||||
@patch("agents.models.Agent.nats_cmd")
|
@patch("agents.models.Agent.nats_cmd")
|
||||||
def test_recover_mesh(self, nats_cmd):
|
def test_recover_mesh(self, nats_cmd):
|
||||||
@@ -661,7 +750,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
nats_cmd.return_value = "ok"
|
nats_cmd.return_value = "ok"
|
||||||
r = self.client.get(url)
|
r = self.client.get(url)
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertIn(self.agent.hostname, r.data)
|
self.assertIn(self.agent.hostname, r.data) # type: ignore
|
||||||
nats_cmd.assert_called_with(
|
nats_cmd.assert_called_with(
|
||||||
{"func": "recover", "payload": {"mode": "mesh"}}, timeout=45
|
{"func": "recover", "payload": {"mode": "mesh"}}, timeout=45
|
||||||
)
|
)
|
||||||
@@ -676,10 +765,82 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
self.check_not_authenticated("get", url)
|
||||||
|
|
||||||
|
@patch("agents.tasks.run_script_email_results_task.delay")
|
||||||
|
@patch("agents.models.Agent.run_script")
|
||||||
|
def test_run_script(self, run_script, email_task):
|
||||||
|
run_script.return_value = "ok"
|
||||||
|
url = "/agents/runscript/"
|
||||||
|
script = baker.make_recipe("scripts.script")
|
||||||
|
|
||||||
|
# test wait
|
||||||
|
data = {
|
||||||
|
"pk": self.agent.pk,
|
||||||
|
"scriptPK": script.pk,
|
||||||
|
"output": "wait",
|
||||||
|
"args": [],
|
||||||
|
"timeout": 15,
|
||||||
|
}
|
||||||
|
|
||||||
|
r = self.client.post(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
run_script.assert_called_with(
|
||||||
|
scriptpk=script.pk, args=[], timeout=18, wait=True
|
||||||
|
)
|
||||||
|
run_script.reset_mock()
|
||||||
|
|
||||||
|
# test email default
|
||||||
|
data = {
|
||||||
|
"pk": self.agent.pk,
|
||||||
|
"scriptPK": script.pk,
|
||||||
|
"output": "email",
|
||||||
|
"args": ["abc", "123"],
|
||||||
|
"timeout": 15,
|
||||||
|
"emailmode": "default",
|
||||||
|
"emails": ["admin@example.com", "bob@example.com"],
|
||||||
|
}
|
||||||
|
r = self.client.post(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
email_task.assert_called_with(
|
||||||
|
agentpk=self.agent.pk,
|
||||||
|
scriptpk=script.pk,
|
||||||
|
nats_timeout=18,
|
||||||
|
emails=[],
|
||||||
|
args=["abc", "123"],
|
||||||
|
)
|
||||||
|
email_task.reset_mock()
|
||||||
|
|
||||||
|
# test email overrides
|
||||||
|
data["emailmode"] = "custom"
|
||||||
|
r = self.client.post(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
email_task.assert_called_with(
|
||||||
|
agentpk=self.agent.pk,
|
||||||
|
scriptpk=script.pk,
|
||||||
|
nats_timeout=18,
|
||||||
|
emails=["admin@example.com", "bob@example.com"],
|
||||||
|
args=["abc", "123"],
|
||||||
|
)
|
||||||
|
|
||||||
|
# test fire and forget
|
||||||
|
data = {
|
||||||
|
"pk": self.agent.pk,
|
||||||
|
"scriptPK": script.pk,
|
||||||
|
"output": "forget",
|
||||||
|
"args": ["hello", "world"],
|
||||||
|
"timeout": 22,
|
||||||
|
}
|
||||||
|
|
||||||
|
r = self.client.post(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
run_script.assert_called_with(
|
||||||
|
scriptpk=script.pk, args=["hello", "world"], timeout=25
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class TestAgentViewsNew(TacticalTestCase):
|
class TestAgentViewsNew(TacticalTestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.authenticate()
|
self.authenticate()
|
||||||
|
self.setup_coresettings()
|
||||||
|
|
||||||
def test_agent_counts(self):
|
def test_agent_counts(self):
|
||||||
url = "/agents/agent_counts/"
|
url = "/agents/agent_counts/"
|
||||||
@@ -690,15 +851,12 @@ class TestAgentViewsNew(TacticalTestCase):
|
|||||||
monitoring_type=cycle(["server", "workstation"]),
|
monitoring_type=cycle(["server", "workstation"]),
|
||||||
_quantity=6,
|
_quantity=6,
|
||||||
)
|
)
|
||||||
agents = baker.make_recipe(
|
baker.make_recipe(
|
||||||
"agents.overdue_agent",
|
"agents.overdue_agent",
|
||||||
monitoring_type=cycle(["server", "workstation"]),
|
monitoring_type=cycle(["server", "workstation"]),
|
||||||
_quantity=6,
|
_quantity=6,
|
||||||
)
|
)
|
||||||
|
|
||||||
# make an AgentOutage for every overdue agent
|
|
||||||
baker.make("agents.AgentOutage", agent=cycle(agents), _quantity=6)
|
|
||||||
|
|
||||||
# returned data should be this
|
# returned data should be this
|
||||||
data = {
|
data = {
|
||||||
"total_server_count": 6,
|
"total_server_count": 6,
|
||||||
@@ -709,7 +867,7 @@ class TestAgentViewsNew(TacticalTestCase):
|
|||||||
|
|
||||||
r = self.client.post(url, format="json")
|
r = self.client.post(url, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.data, data)
|
self.assertEqual(r.data, data) # type: ignore
|
||||||
|
|
||||||
self.check_not_authenticated("post", url)
|
self.check_not_authenticated("post", url)
|
||||||
|
|
||||||
@@ -721,14 +879,14 @@ class TestAgentViewsNew(TacticalTestCase):
|
|||||||
agent = baker.make_recipe("agents.agent", site=site)
|
agent = baker.make_recipe("agents.agent", site=site)
|
||||||
|
|
||||||
# Test client toggle maintenance mode
|
# Test client toggle maintenance mode
|
||||||
data = {"type": "Client", "id": site.client.id, "action": True}
|
data = {"type": "Client", "id": site.client.id, "action": True} # type: ignore
|
||||||
|
|
||||||
r = self.client.post(url, data, format="json")
|
r = self.client.post(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertTrue(Agent.objects.get(pk=agent.pk).maintenance_mode)
|
self.assertTrue(Agent.objects.get(pk=agent.pk).maintenance_mode)
|
||||||
|
|
||||||
# Test site toggle maintenance mode
|
# Test site toggle maintenance mode
|
||||||
data = {"type": "Site", "id": site.id, "action": False}
|
data = {"type": "Site", "id": site.id, "action": False} # type: ignore
|
||||||
|
|
||||||
r = self.client.post(url, data, format="json")
|
r = self.client.post(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
@@ -755,41 +913,6 @@ class TestAgentTasks(TacticalTestCase):
|
|||||||
self.authenticate()
|
self.authenticate()
|
||||||
self.setup_coresettings()
|
self.setup_coresettings()
|
||||||
|
|
||||||
@patch("agents.models.Agent.salt_api_cmd")
|
|
||||||
def test_sync_salt_modules_task(self, salt_api_cmd):
|
|
||||||
self.agent = baker.make_recipe("agents.agent")
|
|
||||||
salt_api_cmd.return_value = {"return": [{f"{self.agent.salt_id}": []}]}
|
|
||||||
ret = sync_salt_modules_task.s(self.agent.pk).apply()
|
|
||||||
salt_api_cmd.assert_called_with(timeout=35, func="saltutil.sync_modules")
|
|
||||||
self.assertEqual(
|
|
||||||
ret.result, f"Successfully synced salt modules on {self.agent.hostname}"
|
|
||||||
)
|
|
||||||
self.assertEqual(ret.status, "SUCCESS")
|
|
||||||
|
|
||||||
salt_api_cmd.return_value = "timeout"
|
|
||||||
ret = sync_salt_modules_task.s(self.agent.pk).apply()
|
|
||||||
self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")
|
|
||||||
|
|
||||||
salt_api_cmd.return_value = "error"
|
|
||||||
ret = sync_salt_modules_task.s(self.agent.pk).apply()
|
|
||||||
self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")
|
|
||||||
|
|
||||||
@patch("agents.models.Agent.salt_batch_async", return_value=None)
|
|
||||||
@patch("agents.tasks.sleep", return_value=None)
|
|
||||||
def test_batch_sync_modules_task(self, mock_sleep, salt_batch_async):
|
|
||||||
# chunks of 50, should run 4 times
|
|
||||||
baker.make_recipe(
|
|
||||||
"agents.online_agent", last_seen=djangotime.now(), _quantity=60
|
|
||||||
)
|
|
||||||
baker.make_recipe(
|
|
||||||
"agents.overdue_agent",
|
|
||||||
last_seen=djangotime.now() - djangotime.timedelta(minutes=9),
|
|
||||||
_quantity=115,
|
|
||||||
)
|
|
||||||
ret = batch_sync_modules_task.s().apply()
|
|
||||||
self.assertEqual(salt_batch_async.call_count, 4)
|
|
||||||
self.assertEqual(ret.status, "SUCCESS")
|
|
||||||
|
|
||||||
@patch("agents.models.Agent.nats_cmd")
|
@patch("agents.models.Agent.nats_cmd")
|
||||||
def test_agent_update(self, nats_cmd):
|
def test_agent_update(self, nats_cmd):
|
||||||
from agents.tasks import agent_update
|
from agents.tasks import agent_update
|
||||||
@@ -797,41 +920,55 @@ class TestAgentTasks(TacticalTestCase):
|
|||||||
agent_noarch = baker.make_recipe(
|
agent_noarch = baker.make_recipe(
|
||||||
"agents.agent",
|
"agents.agent",
|
||||||
operating_system="Error getting OS",
|
operating_system="Error getting OS",
|
||||||
version="1.1.11",
|
version=settings.LATEST_AGENT_VER,
|
||||||
)
|
)
|
||||||
r = agent_update(agent_noarch.pk)
|
r = agent_update(agent_noarch.pk)
|
||||||
self.assertEqual(r, "noarch")
|
self.assertEqual(r, "noarch")
|
||||||
self.assertEqual(
|
|
||||||
PendingAction.objects.filter(
|
|
||||||
agent=agent_noarch, action_type="agentupdate"
|
|
||||||
).count(),
|
|
||||||
0,
|
|
||||||
)
|
|
||||||
|
|
||||||
agent64_111 = baker.make_recipe(
|
agent_1111 = baker.make_recipe(
|
||||||
"agents.agent",
|
"agents.agent",
|
||||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||||
version="1.1.11",
|
version="1.1.11",
|
||||||
)
|
)
|
||||||
|
r = agent_update(agent_1111.pk)
|
||||||
|
self.assertEqual(r, "not supported")
|
||||||
|
|
||||||
r = agent_update(agent64_111.pk)
|
agent64_1112 = baker.make_recipe(
|
||||||
self.assertEqual(r, "created")
|
|
||||||
action = PendingAction.objects.get(agent__pk=agent64_111.pk)
|
|
||||||
self.assertEqual(action.action_type, "agentupdate")
|
|
||||||
self.assertEqual(action.status, "pending")
|
|
||||||
self.assertEqual(action.details["url"], settings.DL_64)
|
|
||||||
self.assertEqual(
|
|
||||||
action.details["inno"], f"winagent-v{settings.LATEST_AGENT_VER}.exe"
|
|
||||||
)
|
|
||||||
self.assertEqual(action.details["version"], settings.LATEST_AGENT_VER)
|
|
||||||
|
|
||||||
agent64 = baker.make_recipe(
|
|
||||||
"agents.agent",
|
"agents.agent",
|
||||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||||
version="1.1.12",
|
version="1.1.12",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
r = agent_update(agent64_1112.pk)
|
||||||
|
self.assertEqual(r, "created")
|
||||||
|
action = PendingAction.objects.get(agent__pk=agent64_1112.pk)
|
||||||
|
self.assertEqual(action.action_type, "agentupdate")
|
||||||
|
self.assertEqual(action.status, "pending")
|
||||||
|
self.assertEqual(
|
||||||
|
action.details["url"],
|
||||||
|
"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
|
||||||
|
)
|
||||||
|
self.assertEqual(action.details["inno"], "winagent-v1.3.0.exe")
|
||||||
|
self.assertEqual(action.details["version"], "1.3.0")
|
||||||
|
nats_cmd.assert_called_with(
|
||||||
|
{
|
||||||
|
"func": "agentupdate",
|
||||||
|
"payload": {
|
||||||
|
"url": "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
|
||||||
|
"version": "1.3.0",
|
||||||
|
"inno": "winagent-v1.3.0.exe",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
wait=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
agent_64_130 = baker.make_recipe(
|
||||||
|
"agents.agent",
|
||||||
|
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||||
|
version="1.3.0",
|
||||||
|
)
|
||||||
nats_cmd.return_value = "ok"
|
nats_cmd.return_value = "ok"
|
||||||
r = agent_update(agent64.pk)
|
r = agent_update(agent_64_130.pk)
|
||||||
self.assertEqual(r, "created")
|
self.assertEqual(r, "created")
|
||||||
nats_cmd.assert_called_with(
|
nats_cmd.assert_called_with(
|
||||||
{
|
{
|
||||||
@@ -844,108 +981,34 @@ class TestAgentTasks(TacticalTestCase):
|
|||||||
},
|
},
|
||||||
wait=False,
|
wait=False,
|
||||||
)
|
)
|
||||||
|
action = PendingAction.objects.get(agent__pk=agent_64_130.pk)
|
||||||
|
self.assertEqual(action.action_type, "agentupdate")
|
||||||
|
self.assertEqual(action.status, "pending")
|
||||||
|
|
||||||
""" @patch("agents.models.Agent.salt_api_async")
|
@patch("agents.tasks.agent_update")
|
||||||
@patch("agents.tasks.sleep", return_value=None)
|
@patch("agents.tasks.sleep", return_value=None)
|
||||||
def test_auto_self_agent_update_task(self, mock_sleep, salt_api_async):
|
def test_auto_self_agent_update_task(self, mock_sleep, agent_update):
|
||||||
# test 64bit golang agent
|
baker.make_recipe(
|
||||||
self.agent64 = baker.make_recipe(
|
|
||||||
"agents.agent",
|
"agents.agent",
|
||||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||||
version="1.0.0",
|
version=settings.LATEST_AGENT_VER,
|
||||||
|
_quantity=23,
|
||||||
)
|
)
|
||||||
salt_api_async.return_value = True
|
baker.make_recipe(
|
||||||
ret = auto_self_agent_update_task.s().apply()
|
|
||||||
salt_api_async.assert_called_with(
|
|
||||||
func="win_agent.do_agent_update_v2",
|
|
||||||
kwargs={
|
|
||||||
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
|
|
||||||
"url": settings.DL_64,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
self.assertEqual(ret.status, "SUCCESS")
|
|
||||||
self.agent64.delete()
|
|
||||||
salt_api_async.reset_mock()
|
|
||||||
|
|
||||||
# test 32bit golang agent
|
|
||||||
self.agent32 = baker.make_recipe(
|
|
||||||
"agents.agent",
|
|
||||||
operating_system="Windows 7 Professional, 32 bit (build 7601.24544)",
|
|
||||||
version="1.0.0",
|
|
||||||
)
|
|
||||||
salt_api_async.return_value = True
|
|
||||||
ret = auto_self_agent_update_task.s().apply()
|
|
||||||
salt_api_async.assert_called_with(
|
|
||||||
func="win_agent.do_agent_update_v2",
|
|
||||||
kwargs={
|
|
||||||
"inno": f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe",
|
|
||||||
"url": settings.DL_32,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
self.assertEqual(ret.status, "SUCCESS")
|
|
||||||
self.agent32.delete()
|
|
||||||
salt_api_async.reset_mock()
|
|
||||||
|
|
||||||
# test agent that has a null os field
|
|
||||||
self.agentNone = baker.make_recipe(
|
|
||||||
"agents.agent",
|
|
||||||
operating_system=None,
|
|
||||||
version="1.0.0",
|
|
||||||
)
|
|
||||||
ret = auto_self_agent_update_task.s().apply()
|
|
||||||
salt_api_async.assert_not_called()
|
|
||||||
self.agentNone.delete()
|
|
||||||
salt_api_async.reset_mock()
|
|
||||||
|
|
||||||
# test auto update disabled in global settings
|
|
||||||
self.agent64 = baker.make_recipe(
|
|
||||||
"agents.agent",
|
"agents.agent",
|
||||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||||
version="1.0.0",
|
version="1.3.0",
|
||||||
|
_quantity=33,
|
||||||
)
|
)
|
||||||
|
|
||||||
self.coresettings.agent_auto_update = False
|
self.coresettings.agent_auto_update = False
|
||||||
self.coresettings.save(update_fields=["agent_auto_update"])
|
self.coresettings.save(update_fields=["agent_auto_update"])
|
||||||
ret = auto_self_agent_update_task.s().apply()
|
|
||||||
salt_api_async.assert_not_called()
|
|
||||||
|
|
||||||
# reset core settings
|
r = auto_self_agent_update_task.s().apply()
|
||||||
self.agent64.delete()
|
self.assertEqual(agent_update.call_count, 0)
|
||||||
salt_api_async.reset_mock()
|
|
||||||
self.coresettings.agent_auto_update = True
|
self.coresettings.agent_auto_update = True
|
||||||
self.coresettings.save(update_fields=["agent_auto_update"])
|
self.coresettings.save(update_fields=["agent_auto_update"])
|
||||||
|
|
||||||
# test 64bit python agent
|
r = auto_self_agent_update_task.s().apply()
|
||||||
self.agent64py = baker.make_recipe(
|
self.assertEqual(agent_update.call_count, 33)
|
||||||
"agents.agent",
|
|
||||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
|
||||||
version="0.11.1",
|
|
||||||
)
|
|
||||||
salt_api_async.return_value = True
|
|
||||||
ret = auto_self_agent_update_task.s().apply()
|
|
||||||
salt_api_async.assert_called_with(
|
|
||||||
func="win_agent.do_agent_update_v2",
|
|
||||||
kwargs={
|
|
||||||
"inno": "winagent-v0.11.2.exe",
|
|
||||||
"url": OLD_64_PY_AGENT,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
self.assertEqual(ret.status, "SUCCESS")
|
|
||||||
self.agent64py.delete()
|
|
||||||
salt_api_async.reset_mock()
|
|
||||||
|
|
||||||
# test 32bit python agent
|
|
||||||
self.agent32py = baker.make_recipe(
|
|
||||||
"agents.agent",
|
|
||||||
operating_system="Windows 7 Professional, 32 bit (build 7601.24544)",
|
|
||||||
version="0.11.1",
|
|
||||||
)
|
|
||||||
salt_api_async.return_value = True
|
|
||||||
ret = auto_self_agent_update_task.s().apply()
|
|
||||||
salt_api_async.assert_called_with(
|
|
||||||
func="win_agent.do_agent_update_v2",
|
|
||||||
kwargs={
|
|
||||||
"inno": "winagent-v0.11.2-x86.exe",
|
|
||||||
"url": OLD_32_PY_AGENT,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
self.assertEqual(ret.status, "SUCCESS") """
|
|
||||||
|
|||||||
@@ -1,12 +1,11 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from . import views
|
from . import views
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path("listagents/", views.AgentsTableList.as_view()),
|
path("listagents/", views.AgentsTableList.as_view()),
|
||||||
path("listagentsnodetail/", views.list_agents_no_detail),
|
path("listagentsnodetail/", views.list_agents_no_detail),
|
||||||
path("<int:pk>/agenteditdetails/", views.agent_edit_details),
|
path("<int:pk>/agenteditdetails/", views.agent_edit_details),
|
||||||
path("byclient/<int:clientpk>/", views.by_client),
|
|
||||||
path("bysite/<int:sitepk>/", views.by_site),
|
|
||||||
path("overdueaction/", views.overdue_action),
|
path("overdueaction/", views.overdue_action),
|
||||||
path("sendrawcmd/", views.send_raw_cmd),
|
path("sendrawcmd/", views.send_raw_cmd),
|
||||||
path("<pk>/agentdetail/", views.agent_detail),
|
path("<pk>/agentdetail/", views.agent_detail),
|
||||||
|
|||||||
@@ -1,46 +1,45 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
from loguru import logger
|
import datetime as dt
|
||||||
import os
|
import os
|
||||||
import subprocess
|
|
||||||
import pytz
|
|
||||||
import random
|
import random
|
||||||
import string
|
import string
|
||||||
import datetime as dt
|
|
||||||
from packaging import version as pyver
|
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.shortcuts import get_object_or_404
|
from django.core.paginator import Paginator
|
||||||
|
from django.db.models import Q
|
||||||
from django.http import HttpResponse
|
from django.http import HttpResponse
|
||||||
|
from django.shortcuts import get_object_or_404
|
||||||
|
from loguru import logger
|
||||||
|
from packaging import version as pyver
|
||||||
|
from rest_framework import status
|
||||||
from rest_framework.decorators import api_view
|
from rest_framework.decorators import api_view
|
||||||
from rest_framework.views import APIView
|
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework import status, generics
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
from .models import Agent, AgentOutage, RecoveryAction, Note
|
|
||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
from scripts.models import Script
|
|
||||||
from logs.models import AuditLog, PendingAction
|
from logs.models import AuditLog, PendingAction
|
||||||
|
from scripts.models import Script
|
||||||
|
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
|
||||||
|
from tacticalrmm.utils import (
|
||||||
|
generate_installer_exe,
|
||||||
|
get_default_timezone,
|
||||||
|
notify_error,
|
||||||
|
reload_nats,
|
||||||
|
)
|
||||||
|
from winupdate.serializers import WinUpdatePolicySerializer
|
||||||
|
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
|
||||||
|
|
||||||
|
from .models import Agent, Note, RecoveryAction
|
||||||
from .serializers import (
|
from .serializers import (
|
||||||
AgentSerializer,
|
|
||||||
AgentHostnameSerializer,
|
|
||||||
AgentTableSerializer,
|
|
||||||
AgentEditSerializer,
|
AgentEditSerializer,
|
||||||
|
AgentHostnameSerializer,
|
||||||
|
AgentOverdueActionSerializer,
|
||||||
|
AgentSerializer,
|
||||||
|
AgentTableSerializer,
|
||||||
NoteSerializer,
|
NoteSerializer,
|
||||||
NotesSerializer,
|
NotesSerializer,
|
||||||
)
|
)
|
||||||
from winupdate.serializers import WinUpdatePolicySerializer
|
from .tasks import run_script_email_results_task, send_agent_update_task
|
||||||
|
|
||||||
from .tasks import (
|
|
||||||
uninstall_agent_task,
|
|
||||||
send_agent_update_task,
|
|
||||||
run_script_email_results_task,
|
|
||||||
)
|
|
||||||
from winupdate.tasks import bulk_check_for_updates_task
|
|
||||||
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
|
|
||||||
|
|
||||||
from tacticalrmm.utils import notify_error, reload_nats
|
|
||||||
|
|
||||||
logger.configure(**settings.LOG_CONFIG)
|
logger.configure(**settings.LOG_CONFIG)
|
||||||
|
|
||||||
@@ -58,9 +57,13 @@ def get_agent_versions(request):
|
|||||||
|
|
||||||
@api_view(["POST"])
|
@api_view(["POST"])
|
||||||
def update_agents(request):
|
def update_agents(request):
|
||||||
pks = request.data["pks"]
|
q = Agent.objects.filter(pk__in=request.data["pks"]).only("pk", "version")
|
||||||
version = request.data["version"]
|
pks: list[int] = [
|
||||||
send_agent_update_task.delay(pks=pks, version=version)
|
i.pk
|
||||||
|
for i in q
|
||||||
|
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||||
|
]
|
||||||
|
send_agent_update_task.delay(pks=pks)
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
@@ -72,10 +75,6 @@ def ping(request, pk):
|
|||||||
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
|
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
|
||||||
if r == "pong":
|
if r == "pong":
|
||||||
status = "online"
|
status = "online"
|
||||||
else:
|
|
||||||
r = agent.salt_api_cmd(timeout=5, func="test.ping")
|
|
||||||
if isinstance(r, bool) and r:
|
|
||||||
status = "online"
|
|
||||||
|
|
||||||
return Response({"name": agent.hostname, "status": status})
|
return Response({"name": agent.hostname, "status": status})
|
||||||
|
|
||||||
@@ -86,13 +85,9 @@ def uninstall(request):
|
|||||||
if agent.has_nats:
|
if agent.has_nats:
|
||||||
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
|
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
|
||||||
|
|
||||||
salt_id = agent.salt_id
|
|
||||||
name = agent.hostname
|
name = agent.hostname
|
||||||
has_nats = agent.has_nats
|
|
||||||
agent.delete()
|
agent.delete()
|
||||||
reload_nats()
|
reload_nats()
|
||||||
|
|
||||||
uninstall_agent_task.delay(salt_id, has_nats)
|
|
||||||
return Response(f"{name} will now be uninstalled.")
|
return Response(f"{name} will now be uninstalled.")
|
||||||
|
|
||||||
|
|
||||||
@@ -100,22 +95,17 @@ def uninstall(request):
|
|||||||
def edit_agent(request):
|
def edit_agent(request):
|
||||||
agent = get_object_or_404(Agent, pk=request.data["id"])
|
agent = get_object_or_404(Agent, pk=request.data["id"])
|
||||||
|
|
||||||
old_site = agent.site.pk
|
|
||||||
a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True)
|
a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True)
|
||||||
a_serializer.is_valid(raise_exception=True)
|
a_serializer.is_valid(raise_exception=True)
|
||||||
a_serializer.save()
|
a_serializer.save()
|
||||||
|
|
||||||
policy = agent.winupdatepolicy.get()
|
if "winupdatepolicy" in request.data.keys():
|
||||||
p_serializer = WinUpdatePolicySerializer(
|
policy = agent.winupdatepolicy.get() # type: ignore
|
||||||
instance=policy, data=request.data["winupdatepolicy"][0]
|
p_serializer = WinUpdatePolicySerializer(
|
||||||
)
|
instance=policy, data=request.data["winupdatepolicy"][0]
|
||||||
p_serializer.is_valid(raise_exception=True)
|
)
|
||||||
p_serializer.save()
|
p_serializer.is_valid(raise_exception=True)
|
||||||
|
p_serializer.save()
|
||||||
# check if site changed and initiate generating correct policies
|
|
||||||
if old_site != request.data["site"]:
|
|
||||||
agent.generate_checks_from_policies(clear=True)
|
|
||||||
agent.generate_tasks_from_policies(clear=True)
|
|
||||||
|
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
@@ -159,12 +149,12 @@ def agent_detail(request, pk):
|
|||||||
@api_view()
|
@api_view()
|
||||||
def get_processes(request, pk):
|
def get_processes(request, pk):
|
||||||
agent = get_object_or_404(Agent, pk=pk)
|
agent = get_object_or_404(Agent, pk=pk)
|
||||||
if not agent.has_nats:
|
if pyver.parse(agent.version) < pyver.parse("1.2.0"):
|
||||||
return notify_error("Requires agent version 1.1.0 or greater")
|
return notify_error("Requires agent version 1.2.0 or greater")
|
||||||
|
|
||||||
r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
|
r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
|
||||||
if r == "timeout":
|
if r == "timeout":
|
||||||
return notify_error("Unable to contact the agent")
|
return notify_error("Unable to contact the agent")
|
||||||
|
|
||||||
return Response(r)
|
return Response(r)
|
||||||
|
|
||||||
|
|
||||||
@@ -191,15 +181,16 @@ def get_event_log(request, pk, logtype, days):
|
|||||||
agent = get_object_or_404(Agent, pk=pk)
|
agent = get_object_or_404(Agent, pk=pk)
|
||||||
if not agent.has_nats:
|
if not agent.has_nats:
|
||||||
return notify_error("Requires agent version 1.1.0 or greater")
|
return notify_error("Requires agent version 1.1.0 or greater")
|
||||||
|
timeout = 180 if logtype == "Security" else 30
|
||||||
data = {
|
data = {
|
||||||
"func": "eventlog",
|
"func": "eventlog",
|
||||||
"timeout": 30,
|
"timeout": timeout,
|
||||||
"payload": {
|
"payload": {
|
||||||
"logname": logtype,
|
"logname": logtype,
|
||||||
"days": str(days),
|
"days": str(days),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
r = asyncio.run(agent.nats_cmd(data, timeout=32))
|
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
|
||||||
if r == "timeout":
|
if r == "timeout":
|
||||||
return notify_error("Unable to contact the agent")
|
return notify_error("Unable to contact the agent")
|
||||||
|
|
||||||
@@ -235,38 +226,74 @@ def send_raw_cmd(request):
|
|||||||
return Response(r)
|
return Response(r)
|
||||||
|
|
||||||
|
|
||||||
class AgentsTableList(generics.ListAPIView):
|
class AgentsTableList(APIView):
|
||||||
queryset = (
|
def patch(self, request):
|
||||||
Agent.objects.select_related("site")
|
pagination = request.data["pagination"]
|
||||||
.prefetch_related("agentchecks")
|
monType = request.data["monType"]
|
||||||
.only(
|
client = Q()
|
||||||
"pk",
|
site = Q()
|
||||||
"hostname",
|
mon_type = Q()
|
||||||
"agent_id",
|
|
||||||
"site",
|
|
||||||
"monitoring_type",
|
|
||||||
"description",
|
|
||||||
"needs_reboot",
|
|
||||||
"overdue_text_alert",
|
|
||||||
"overdue_email_alert",
|
|
||||||
"overdue_time",
|
|
||||||
"last_seen",
|
|
||||||
"boot_time",
|
|
||||||
"logged_in_username",
|
|
||||||
"last_logged_in_user",
|
|
||||||
"time_zone",
|
|
||||||
"maintenance_mode",
|
|
||||||
)
|
|
||||||
)
|
|
||||||
serializer_class = AgentTableSerializer
|
|
||||||
|
|
||||||
def list(self, request):
|
if pagination["sortBy"] == "agentstatus":
|
||||||
queryset = self.get_queryset()
|
sort = "last_seen"
|
||||||
ctx = {
|
elif pagination["sortBy"] == "client_name":
|
||||||
"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)
|
sort = "site__client__name"
|
||||||
}
|
elif pagination["sortBy"] == "site_name":
|
||||||
serializer = AgentTableSerializer(queryset, many=True, context=ctx)
|
sort = "site__name"
|
||||||
return Response(serializer.data)
|
elif pagination["sortBy"] == "user":
|
||||||
|
sort = "logged_in_username"
|
||||||
|
else:
|
||||||
|
sort = pagination["sortBy"]
|
||||||
|
|
||||||
|
order_by = f"-{sort}" if pagination["descending"] else sort
|
||||||
|
|
||||||
|
if monType == "server":
|
||||||
|
mon_type = Q(monitoring_type="server")
|
||||||
|
elif monType == "workstation":
|
||||||
|
mon_type = Q(monitoring_type="workstation")
|
||||||
|
|
||||||
|
if "clientPK" in request.data:
|
||||||
|
client = Q(site__client_id=request.data["clientPK"])
|
||||||
|
|
||||||
|
if "sitePK" in request.data:
|
||||||
|
site = Q(site_id=request.data["sitePK"])
|
||||||
|
|
||||||
|
queryset = (
|
||||||
|
Agent.objects.select_related("site")
|
||||||
|
.prefetch_related("agentchecks")
|
||||||
|
.filter(mon_type)
|
||||||
|
.filter(client)
|
||||||
|
.filter(site)
|
||||||
|
.only(
|
||||||
|
"pk",
|
||||||
|
"hostname",
|
||||||
|
"agent_id",
|
||||||
|
"site",
|
||||||
|
"monitoring_type",
|
||||||
|
"description",
|
||||||
|
"needs_reboot",
|
||||||
|
"overdue_text_alert",
|
||||||
|
"overdue_email_alert",
|
||||||
|
"overdue_time",
|
||||||
|
"offline_time",
|
||||||
|
"last_seen",
|
||||||
|
"boot_time",
|
||||||
|
"logged_in_username",
|
||||||
|
"last_logged_in_user",
|
||||||
|
"time_zone",
|
||||||
|
"maintenance_mode",
|
||||||
|
)
|
||||||
|
.order_by(order_by)
|
||||||
|
)
|
||||||
|
paginator = Paginator(queryset, pagination["rowsPerPage"])
|
||||||
|
|
||||||
|
ctx = {"default_tz": get_default_timezone()}
|
||||||
|
serializer = AgentTableSerializer(
|
||||||
|
paginator.get_page(pagination["page"]), many=True, context=ctx
|
||||||
|
)
|
||||||
|
|
||||||
|
ret = {"agents": serializer.data, "total": paginator.count}
|
||||||
|
return Response(ret)
|
||||||
|
|
||||||
|
|
||||||
@api_view()
|
@api_view()
|
||||||
@@ -281,86 +308,14 @@ def agent_edit_details(request, pk):
|
|||||||
return Response(AgentEditSerializer(agent).data)
|
return Response(AgentEditSerializer(agent).data)
|
||||||
|
|
||||||
|
|
||||||
@api_view()
|
|
||||||
def by_client(request, clientpk):
|
|
||||||
agents = (
|
|
||||||
Agent.objects.select_related("site")
|
|
||||||
.filter(site__client_id=clientpk)
|
|
||||||
.prefetch_related("agentchecks")
|
|
||||||
.only(
|
|
||||||
"pk",
|
|
||||||
"hostname",
|
|
||||||
"agent_id",
|
|
||||||
"site",
|
|
||||||
"monitoring_type",
|
|
||||||
"description",
|
|
||||||
"needs_reboot",
|
|
||||||
"overdue_text_alert",
|
|
||||||
"overdue_email_alert",
|
|
||||||
"overdue_time",
|
|
||||||
"last_seen",
|
|
||||||
"boot_time",
|
|
||||||
"logged_in_username",
|
|
||||||
"last_logged_in_user",
|
|
||||||
"time_zone",
|
|
||||||
"maintenance_mode",
|
|
||||||
)
|
|
||||||
)
|
|
||||||
ctx = {"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)}
|
|
||||||
return Response(AgentTableSerializer(agents, many=True, context=ctx).data)
|
|
||||||
|
|
||||||
|
|
||||||
@api_view()
|
|
||||||
def by_site(request, sitepk):
|
|
||||||
agents = (
|
|
||||||
Agent.objects.filter(site_id=sitepk)
|
|
||||||
.select_related("site")
|
|
||||||
.prefetch_related("agentchecks")
|
|
||||||
.only(
|
|
||||||
"pk",
|
|
||||||
"hostname",
|
|
||||||
"agent_id",
|
|
||||||
"site",
|
|
||||||
"monitoring_type",
|
|
||||||
"description",
|
|
||||||
"needs_reboot",
|
|
||||||
"overdue_text_alert",
|
|
||||||
"overdue_email_alert",
|
|
||||||
"overdue_time",
|
|
||||||
"last_seen",
|
|
||||||
"boot_time",
|
|
||||||
"logged_in_username",
|
|
||||||
"last_logged_in_user",
|
|
||||||
"time_zone",
|
|
||||||
"maintenance_mode",
|
|
||||||
)
|
|
||||||
)
|
|
||||||
ctx = {"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)}
|
|
||||||
return Response(AgentTableSerializer(agents, many=True, context=ctx).data)
|
|
||||||
|
|
||||||
|
|
||||||
@api_view(["POST"])
|
@api_view(["POST"])
|
||||||
def overdue_action(request):
|
def overdue_action(request):
|
||||||
pk = request.data["pk"]
|
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||||
alert_type = request.data["alertType"]
|
serializer = AgentOverdueActionSerializer(
|
||||||
action = request.data["action"]
|
instance=agent, data=request.data, partial=True
|
||||||
agent = get_object_or_404(Agent, pk=pk)
|
)
|
||||||
if alert_type == "email" and action == "enabled":
|
serializer.is_valid(raise_exception=True)
|
||||||
agent.overdue_email_alert = True
|
serializer.save()
|
||||||
agent.save(update_fields=["overdue_email_alert"])
|
|
||||||
elif alert_type == "email" and action == "disabled":
|
|
||||||
agent.overdue_email_alert = False
|
|
||||||
agent.save(update_fields=["overdue_email_alert"])
|
|
||||||
elif alert_type == "text" and action == "enabled":
|
|
||||||
agent.overdue_text_alert = True
|
|
||||||
agent.save(update_fields=["overdue_text_alert"])
|
|
||||||
elif alert_type == "text" and action == "disabled":
|
|
||||||
agent.overdue_text_alert = False
|
|
||||||
agent.save(update_fields=["overdue_text_alert"])
|
|
||||||
else:
|
|
||||||
return Response(
|
|
||||||
{"error": "Something went wrong"}, status=status.HTTP_400_BAD_REQUEST
|
|
||||||
)
|
|
||||||
return Response(agent.hostname)
|
return Response(agent.hostname)
|
||||||
|
|
||||||
|
|
||||||
@@ -454,124 +409,20 @@ def install_agent(request):
|
|||||||
)
|
)
|
||||||
|
|
||||||
if request.data["installMethod"] == "exe":
|
if request.data["installMethod"] == "exe":
|
||||||
go_bin = "/usr/local/rmmgo/go/bin/go"
|
return generate_installer_exe(
|
||||||
|
file_name="rmm-installer.exe",
|
||||||
if not os.path.exists(go_bin):
|
goarch="amd64" if arch == "64" else "386",
|
||||||
return Response("nogolang", status=status.HTTP_409_CONFLICT)
|
inno=inno,
|
||||||
|
api=request.data["api"],
|
||||||
api = request.data["api"]
|
client_id=client_id,
|
||||||
atype = request.data["agenttype"]
|
site_id=site_id,
|
||||||
rdp = request.data["rdp"]
|
atype=request.data["agenttype"],
|
||||||
ping = request.data["ping"]
|
rdp=request.data["rdp"],
|
||||||
power = request.data["power"]
|
ping=request.data["ping"],
|
||||||
|
power=request.data["power"],
|
||||||
file_name = "rmm-installer.exe"
|
download_url=download_url,
|
||||||
exe = os.path.join(settings.EXE_DIR, file_name)
|
token=token,
|
||||||
|
)
|
||||||
if os.path.exists(exe):
|
|
||||||
try:
|
|
||||||
os.remove(exe)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(str(e))
|
|
||||||
|
|
||||||
goarch = "amd64" if arch == "64" else "386"
|
|
||||||
cmd = [
|
|
||||||
"env",
|
|
||||||
"GOOS=windows",
|
|
||||||
f"GOARCH={goarch}",
|
|
||||||
go_bin,
|
|
||||||
"build",
|
|
||||||
f"-ldflags=\"-X 'main.Inno={inno}'",
|
|
||||||
f"-X 'main.Api={api}'",
|
|
||||||
f"-X 'main.Client={client_id}'",
|
|
||||||
f"-X 'main.Site={site_id}'",
|
|
||||||
f"-X 'main.Atype={atype}'",
|
|
||||||
f"-X 'main.Rdp={rdp}'",
|
|
||||||
f"-X 'main.Ping={ping}'",
|
|
||||||
f"-X 'main.Power={power}'",
|
|
||||||
f"-X 'main.DownloadUrl={download_url}'",
|
|
||||||
f"-X 'main.Token={token}'\"",
|
|
||||||
"-o",
|
|
||||||
exe,
|
|
||||||
]
|
|
||||||
|
|
||||||
build_error = False
|
|
||||||
gen_error = False
|
|
||||||
|
|
||||||
gen = [
|
|
||||||
"env",
|
|
||||||
"GOOS=windows",
|
|
||||||
f"GOARCH={goarch}",
|
|
||||||
go_bin,
|
|
||||||
"generate",
|
|
||||||
]
|
|
||||||
try:
|
|
||||||
r1 = subprocess.run(
|
|
||||||
" ".join(gen),
|
|
||||||
capture_output=True,
|
|
||||||
shell=True,
|
|
||||||
cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"),
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
gen_error = True
|
|
||||||
logger.error(str(e))
|
|
||||||
return Response(
|
|
||||||
"genfailed", status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE
|
|
||||||
)
|
|
||||||
|
|
||||||
if r1.returncode != 0:
|
|
||||||
gen_error = True
|
|
||||||
if r1.stdout:
|
|
||||||
logger.error(r1.stdout.decode("utf-8", errors="ignore"))
|
|
||||||
|
|
||||||
if r1.stderr:
|
|
||||||
logger.error(r1.stderr.decode("utf-8", errors="ignore"))
|
|
||||||
|
|
||||||
logger.error(f"Go build failed with return code {r1.returncode}")
|
|
||||||
|
|
||||||
if gen_error:
|
|
||||||
return Response(
|
|
||||||
"genfailed", status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
r = subprocess.run(
|
|
||||||
" ".join(cmd),
|
|
||||||
capture_output=True,
|
|
||||||
shell=True,
|
|
||||||
cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"),
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
build_error = True
|
|
||||||
logger.error(str(e))
|
|
||||||
return Response("buildfailed", status=status.HTTP_412_PRECONDITION_FAILED)
|
|
||||||
|
|
||||||
if r.returncode != 0:
|
|
||||||
build_error = True
|
|
||||||
if r.stdout:
|
|
||||||
logger.error(r.stdout.decode("utf-8", errors="ignore"))
|
|
||||||
|
|
||||||
if r.stderr:
|
|
||||||
logger.error(r.stderr.decode("utf-8", errors="ignore"))
|
|
||||||
|
|
||||||
logger.error(f"Go build failed with return code {r.returncode}")
|
|
||||||
|
|
||||||
if build_error:
|
|
||||||
return Response("buildfailed", status=status.HTTP_412_PRECONDITION_FAILED)
|
|
||||||
|
|
||||||
if settings.DEBUG:
|
|
||||||
with open(exe, "rb") as f:
|
|
||||||
response = HttpResponse(
|
|
||||||
f.read(),
|
|
||||||
content_type="application/vnd.microsoft.portable-executable",
|
|
||||||
)
|
|
||||||
response["Content-Disposition"] = f"inline; filename={file_name}"
|
|
||||||
return response
|
|
||||||
else:
|
|
||||||
response = HttpResponse()
|
|
||||||
response["Content-Disposition"] = f"attachment; filename={file_name}"
|
|
||||||
response["X-Accel-Redirect"] = f"/private/exe/{file_name}"
|
|
||||||
return response
|
|
||||||
|
|
||||||
elif request.data["installMethod"] == "manual":
|
elif request.data["installMethod"] == "manual":
|
||||||
cmd = [
|
cmd = [
|
||||||
@@ -579,12 +430,10 @@ def install_agent(request):
|
|||||||
"/VERYSILENT",
|
"/VERYSILENT",
|
||||||
"/SUPPRESSMSGBOXES",
|
"/SUPPRESSMSGBOXES",
|
||||||
"&&",
|
"&&",
|
||||||
"timeout",
|
"ping",
|
||||||
"/t",
|
"127.0.0.1",
|
||||||
"10",
|
"-n",
|
||||||
"/nobreak",
|
"5",
|
||||||
">",
|
|
||||||
"NUL",
|
|
||||||
"&&",
|
"&&",
|
||||||
r'"C:\Program Files\TacticalAgent\tacticalrmm.exe"',
|
r'"C:\Program Files\TacticalAgent\tacticalrmm.exe"',
|
||||||
"-m",
|
"-m",
|
||||||
@@ -611,8 +460,6 @@ def install_agent(request):
|
|||||||
resp = {
|
resp = {
|
||||||
"cmd": " ".join(str(i) for i in cmd),
|
"cmd": " ".join(str(i) for i in cmd),
|
||||||
"url": download_url,
|
"url": download_url,
|
||||||
"salt64": settings.SALT_64,
|
|
||||||
"salt32": settings.SALT_32,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return Response(resp)
|
return Response(resp)
|
||||||
@@ -673,23 +520,18 @@ def recover(request):
|
|||||||
return notify_error("Only available in agent version greater than 0.9.5")
|
return notify_error("Only available in agent version greater than 0.9.5")
|
||||||
|
|
||||||
if not agent.has_nats:
|
if not agent.has_nats:
|
||||||
if mode == "tacagent" or mode == "checkrunner" or mode == "rpc":
|
if mode == "tacagent" or mode == "rpc":
|
||||||
return notify_error("Requires agent version 1.1.0 or greater")
|
return notify_error("Requires agent version 1.1.0 or greater")
|
||||||
|
|
||||||
# attempt a realtime recovery if supported, otherwise fall back to old recovery method
|
# attempt a realtime recovery if supported, otherwise fall back to old recovery method
|
||||||
if agent.has_nats:
|
if agent.has_nats:
|
||||||
if (
|
if mode == "tacagent" or mode == "mesh":
|
||||||
mode == "tacagent"
|
|
||||||
or mode == "checkrunner"
|
|
||||||
or mode == "salt"
|
|
||||||
or mode == "mesh"
|
|
||||||
):
|
|
||||||
data = {"func": "recover", "payload": {"mode": mode}}
|
data = {"func": "recover", "payload": {"mode": mode}}
|
||||||
r = asyncio.run(agent.nats_cmd(data, timeout=10))
|
r = asyncio.run(agent.nats_cmd(data, timeout=10))
|
||||||
if r == "ok":
|
if r == "ok":
|
||||||
return Response("Successfully completed recovery")
|
return Response("Successfully completed recovery")
|
||||||
|
|
||||||
if agent.recoveryactions.filter(last_run=None).exists():
|
if agent.recoveryactions.filter(last_run=None).exists(): # type: ignore
|
||||||
return notify_error(
|
return notify_error(
|
||||||
"A recovery action is currently pending. Please wait for the next agent check-in."
|
"A recovery action is currently pending. Please wait for the next agent check-in."
|
||||||
)
|
)
|
||||||
@@ -717,10 +559,9 @@ def recover(request):
|
|||||||
@api_view(["POST"])
|
@api_view(["POST"])
|
||||||
def run_script(request):
|
def run_script(request):
|
||||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||||
if not agent.has_nats:
|
|
||||||
return notify_error("Requires agent version 1.1.0 or greater")
|
|
||||||
script = get_object_or_404(Script, pk=request.data["scriptPK"])
|
script = get_object_or_404(Script, pk=request.data["scriptPK"])
|
||||||
output = request.data["output"]
|
output = request.data["output"]
|
||||||
|
args = request.data["args"]
|
||||||
req_timeout = int(request.data["timeout"]) + 3
|
req_timeout = int(request.data["timeout"]) + 3
|
||||||
|
|
||||||
AuditLog.audit_script_run(
|
AuditLog.audit_script_run(
|
||||||
@@ -729,23 +570,13 @@ def run_script(request):
|
|||||||
script=script.name,
|
script=script.name,
|
||||||
)
|
)
|
||||||
|
|
||||||
data = {
|
|
||||||
"func": "runscript",
|
|
||||||
"timeout": request.data["timeout"],
|
|
||||||
"script_args": request.data["args"],
|
|
||||||
"payload": {
|
|
||||||
"code": script.code,
|
|
||||||
"shell": script.shell,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
if output == "wait":
|
if output == "wait":
|
||||||
r = asyncio.run(agent.nats_cmd(data, timeout=req_timeout))
|
r = agent.run_script(
|
||||||
|
scriptpk=script.pk, args=args, timeout=req_timeout, wait=True
|
||||||
|
)
|
||||||
return Response(r)
|
return Response(r)
|
||||||
elif output == "email":
|
|
||||||
if not pyver.parse(agent.version) >= pyver.parse("1.1.12"):
|
|
||||||
return notify_error("Requires agent version 1.1.12 or greater")
|
|
||||||
|
|
||||||
|
elif output == "email":
|
||||||
emails = (
|
emails = (
|
||||||
[] if request.data["emailmode"] == "default" else request.data["emails"]
|
[] if request.data["emailmode"] == "default" else request.data["emails"]
|
||||||
)
|
)
|
||||||
@@ -753,13 +584,13 @@ def run_script(request):
|
|||||||
agentpk=agent.pk,
|
agentpk=agent.pk,
|
||||||
scriptpk=script.pk,
|
scriptpk=script.pk,
|
||||||
nats_timeout=req_timeout,
|
nats_timeout=req_timeout,
|
||||||
nats_data=data,
|
|
||||||
emails=emails,
|
emails=emails,
|
||||||
|
args=args,
|
||||||
)
|
)
|
||||||
return Response(f"{script.name} will now be run on {agent.hostname}")
|
|
||||||
else:
|
else:
|
||||||
asyncio.run(agent.nats_cmd(data, wait=False))
|
agent.run_script(scriptpk=script.pk, args=args, timeout=req_timeout)
|
||||||
return Response(f"{script.name} will now be run on {agent.hostname}")
|
|
||||||
|
return Response(f"{script.name} will now be run on {agent.hostname}")
|
||||||
|
|
||||||
|
|
||||||
@api_view()
|
@api_view()
|
||||||
@@ -840,7 +671,7 @@ def bulk(request):
|
|||||||
elif request.data["target"] == "agents":
|
elif request.data["target"] == "agents":
|
||||||
q = Agent.objects.filter(pk__in=request.data["agentPKs"])
|
q = Agent.objects.filter(pk__in=request.data["agentPKs"])
|
||||||
elif request.data["target"] == "all":
|
elif request.data["target"] == "all":
|
||||||
q = Agent.objects.all()
|
q = Agent.objects.only("pk", "monitoring_type")
|
||||||
else:
|
else:
|
||||||
return notify_error("Something went wrong")
|
return notify_error("Something went wrong")
|
||||||
|
|
||||||
@@ -849,8 +680,7 @@ def bulk(request):
|
|||||||
elif request.data["monType"] == "workstations":
|
elif request.data["monType"] == "workstations":
|
||||||
q = q.filter(monitoring_type="workstation")
|
q = q.filter(monitoring_type="workstation")
|
||||||
|
|
||||||
minions = [agent.salt_id for agent in q]
|
agents: list[int] = [agent.pk for agent in q]
|
||||||
agents = [agent.pk for agent in q]
|
|
||||||
|
|
||||||
AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
|
AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
|
||||||
|
|
||||||
@@ -868,14 +698,12 @@ def bulk(request):
|
|||||||
return Response(f"{script.name} will now be run on {len(agents)} agents")
|
return Response(f"{script.name} will now be run on {len(agents)} agents")
|
||||||
|
|
||||||
elif request.data["mode"] == "install":
|
elif request.data["mode"] == "install":
|
||||||
r = Agent.salt_batch_async(minions=minions, func="win_agent.install_updates")
|
bulk_install_updates_task.delay(agents)
|
||||||
if r == "timeout":
|
|
||||||
return notify_error("Salt API not running")
|
|
||||||
return Response(
|
return Response(
|
||||||
f"Pending updates will now be installed on {len(agents)} agents"
|
f"Pending updates will now be installed on {len(agents)} agents"
|
||||||
)
|
)
|
||||||
elif request.data["mode"] == "scan":
|
elif request.data["mode"] == "scan":
|
||||||
bulk_check_for_updates_task.delay(minions=minions)
|
bulk_check_for_updates_task.delay(agents)
|
||||||
return Response(f"Patch status scan will now run on {len(agents)} agents")
|
return Response(f"Patch status scan will now run on {len(agents)} agents")
|
||||||
|
|
||||||
return notify_error("Something went wrong")
|
return notify_error("Something went wrong")
|
||||||
@@ -883,20 +711,43 @@ def bulk(request):
|
|||||||
|
|
||||||
@api_view(["POST"])
|
@api_view(["POST"])
|
||||||
def agent_counts(request):
|
def agent_counts(request):
|
||||||
|
|
||||||
|
server_offline_count = len(
|
||||||
|
[
|
||||||
|
agent
|
||||||
|
for agent in Agent.objects.filter(monitoring_type="server").only(
|
||||||
|
"pk",
|
||||||
|
"last_seen",
|
||||||
|
"overdue_time",
|
||||||
|
"offline_time",
|
||||||
|
)
|
||||||
|
if not agent.status == "online"
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
workstation_offline_count = len(
|
||||||
|
[
|
||||||
|
agent
|
||||||
|
for agent in Agent.objects.filter(monitoring_type="workstation").only(
|
||||||
|
"pk",
|
||||||
|
"last_seen",
|
||||||
|
"overdue_time",
|
||||||
|
"offline_time",
|
||||||
|
)
|
||||||
|
if not agent.status == "online"
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
"total_server_count": Agent.objects.filter(
|
"total_server_count": Agent.objects.filter(
|
||||||
monitoring_type="server"
|
monitoring_type="server"
|
||||||
).count(),
|
).count(),
|
||||||
"total_server_offline_count": AgentOutage.objects.filter(
|
"total_server_offline_count": server_offline_count,
|
||||||
recovery_time=None, agent__monitoring_type="server"
|
|
||||||
).count(),
|
|
||||||
"total_workstation_count": Agent.objects.filter(
|
"total_workstation_count": Agent.objects.filter(
|
||||||
monitoring_type="workstation"
|
monitoring_type="workstation"
|
||||||
).count(),
|
).count(),
|
||||||
"total_workstation_offline_count": AgentOutage.objects.filter(
|
"total_workstation_offline_count": workstation_offline_count,
|
||||||
recovery_time=None, agent__monitoring_type="workstation"
|
|
||||||
).count(),
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
from django.contrib import admin
|
from django.contrib import admin
|
||||||
|
|
||||||
from .models import Alert
|
from .models import Alert, AlertTemplate
|
||||||
|
|
||||||
|
|
||||||
admin.site.register(Alert)
|
admin.site.register(Alert)
|
||||||
|
admin.site.register(AlertTemplate)
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# Generated by Django 3.1 on 2020-08-15 15:31
|
# Generated by Django 3.1 on 2020-08-15 15:31
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
@@ -42,4 +42,4 @@ class Migration(migrations.Migration):
|
|||||||
),
|
),
|
||||||
],
|
],
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
@@ -27,4 +27,4 @@ class Migration(migrations.Migration):
|
|||||||
max_length=100,
|
max_length=100,
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
# Generated by Django 3.1.2 on 2020-10-21 18:15
|
# Generated by Django 3.1.2 on 2020-10-21 18:15
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
@@ -28,4 +28,4 @@ class Migration(migrations.Migration):
|
|||||||
name="alert_time",
|
name="alert_time",
|
||||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
172
api/tacticalrmm/alerts/migrations/0004_auto_20210212_1408.py
Normal file
172
api/tacticalrmm/alerts/migrations/0004_auto_20210212_1408.py
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-02-12 14:08
|
||||||
|
|
||||||
|
import django.contrib.postgres.fields
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('agents', '0029_delete_agentoutage'),
|
||||||
|
('clients', '0008_auto_20201103_1430'),
|
||||||
|
('autotasks', '0017_auto_20210210_1512'),
|
||||||
|
('scripts', '0005_auto_20201207_1606'),
|
||||||
|
('alerts', '0003_auto_20201021_1815'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='action_execution_time',
|
||||||
|
field=models.CharField(blank=True, max_length=100, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='action_retcode',
|
||||||
|
field=models.IntegerField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='action_run',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='action_stderr',
|
||||||
|
field=models.TextField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='action_stdout',
|
||||||
|
field=models.TextField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='action_timeout',
|
||||||
|
field=models.PositiveIntegerField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='alert_type',
|
||||||
|
field=models.CharField(choices=[('availability', 'Availability'), ('check', 'Check'), ('task', 'Task'), ('custom', 'Custom')], default='availability', max_length=20),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='assigned_task',
|
||||||
|
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='autotasks.automatedtask'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='email_sent',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='hidden',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='resolved_action_execution_time',
|
||||||
|
field=models.CharField(blank=True, max_length=100, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='resolved_action_retcode',
|
||||||
|
field=models.IntegerField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='resolved_action_run',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='resolved_action_stderr',
|
||||||
|
field=models.TextField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='resolved_action_stdout',
|
||||||
|
field=models.TextField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='resolved_action_timeout',
|
||||||
|
field=models.PositiveIntegerField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='resolved_email_sent',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='resolved_on',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='resolved_sms_sent',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='sms_sent',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alert',
|
||||||
|
name='snoozed',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='alert',
|
||||||
|
name='severity',
|
||||||
|
field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=30),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='AlertTemplate',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('name', models.CharField(max_length=100)),
|
||||||
|
('is_active', models.BooleanField(default=True)),
|
||||||
|
('action_args', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)),
|
||||||
|
('resolved_action_args', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)),
|
||||||
|
('email_recipients', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100), blank=True, default=list, null=True, size=None)),
|
||||||
|
('text_recipients', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100), blank=True, default=list, null=True, size=None)),
|
||||||
|
('email_from', models.EmailField(blank=True, max_length=254, null=True)),
|
||||||
|
('agent_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('agent_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('agent_include_desktops', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('agent_always_email', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('agent_always_text', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('agent_always_alert', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('agent_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)),
|
||||||
|
('check_email_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||||
|
('check_text_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||||
|
('check_dashboard_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||||
|
('check_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('check_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('check_always_email', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('check_always_text', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('check_always_alert', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('check_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)),
|
||||||
|
('task_email_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||||
|
('task_text_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||||
|
('task_dashboard_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||||
|
('task_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('task_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('task_always_email', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('task_always_text', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('task_always_alert', models.BooleanField(blank=True, default=False, null=True)),
|
||||||
|
('task_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)),
|
||||||
|
('action', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='alert_template', to='scripts.script')),
|
||||||
|
('excluded_agents', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='agents.Agent')),
|
||||||
|
('excluded_clients', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='clients.Client')),
|
||||||
|
('excluded_sites', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='clients.Site')),
|
||||||
|
('resolved_action', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_alert_template', to='scripts.script')),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
]
|
||||||
31
api/tacticalrmm/alerts/migrations/0005_auto_20210212_1745.py
Normal file
31
api/tacticalrmm/alerts/migrations/0005_auto_20210212_1745.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-02-12 17:45
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('alerts', '0004_auto_20210212_1408'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name='alert',
|
||||||
|
name='action_timeout',
|
||||||
|
),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name='alert',
|
||||||
|
name='resolved_action_timeout',
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='action_timeout',
|
||||||
|
field=models.PositiveIntegerField(default=15),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='resolved_action_timeout',
|
||||||
|
field=models.PositiveIntegerField(default=15),
|
||||||
|
),
|
||||||
|
]
|
||||||
72
api/tacticalrmm/alerts/migrations/0006_auto_20210217_1736.py
Normal file
72
api/tacticalrmm/alerts/migrations/0006_auto_20210217_1736.py
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
# Generated by Django 3.1.6 on 2021-02-17 17:36
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('alerts', '0005_auto_20210212_1745'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='agent_include_desktops',
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='exclude_servers',
|
||||||
|
field=models.BooleanField(blank=True, default=False, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='exclude_workstations',
|
||||||
|
field=models.BooleanField(blank=True, default=False, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='agent_always_alert',
|
||||||
|
field=models.BooleanField(blank=True, default=None, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='agent_always_email',
|
||||||
|
field=models.BooleanField(blank=True, default=None, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='agent_always_text',
|
||||||
|
field=models.BooleanField(blank=True, default=None, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='check_always_alert',
|
||||||
|
field=models.BooleanField(blank=True, default=None, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='check_always_email',
|
||||||
|
field=models.BooleanField(blank=True, default=None, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='check_always_text',
|
||||||
|
field=models.BooleanField(blank=True, default=None, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='task_always_alert',
|
||||||
|
field=models.BooleanField(blank=True, default=None, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='task_always_email',
|
||||||
|
field=models.BooleanField(blank=True, default=None, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='alerttemplate',
|
||||||
|
name='task_always_text',
|
||||||
|
field=models.BooleanField(blank=True, default=None, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -1,5 +1,20 @@
|
|||||||
from django.db import models
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING, Union
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.contrib.postgres.fields import ArrayField
|
||||||
|
from django.db import models
|
||||||
|
from django.db.models.fields import BooleanField, PositiveIntegerField
|
||||||
|
from django.utils import timezone as djangotime
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from agents.models import Agent
|
||||||
|
from autotasks.models import AutomatedTask
|
||||||
|
from checks.models import Check
|
||||||
|
|
||||||
|
logger.configure(**settings.LOG_CONFIG)
|
||||||
|
|
||||||
SEVERITY_CHOICES = [
|
SEVERITY_CHOICES = [
|
||||||
("info", "Informational"),
|
("info", "Informational"),
|
||||||
@@ -7,6 +22,13 @@ SEVERITY_CHOICES = [
|
|||||||
("error", "Error"),
|
("error", "Error"),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
ALERT_TYPE_CHOICES = [
|
||||||
|
("availability", "Availability"),
|
||||||
|
("check", "Check"),
|
||||||
|
("task", "Task"),
|
||||||
|
("custom", "Custom"),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class Alert(models.Model):
|
class Alert(models.Model):
|
||||||
agent = models.ForeignKey(
|
agent = models.ForeignKey(
|
||||||
@@ -23,21 +45,554 @@ class Alert(models.Model):
|
|||||||
null=True,
|
null=True,
|
||||||
blank=True,
|
blank=True,
|
||||||
)
|
)
|
||||||
|
assigned_task = models.ForeignKey(
|
||||||
|
"autotasks.AutomatedTask",
|
||||||
|
related_name="alert",
|
||||||
|
on_delete=models.CASCADE,
|
||||||
|
null=True,
|
||||||
|
blank=True,
|
||||||
|
)
|
||||||
|
alert_type = models.CharField(
|
||||||
|
max_length=20, choices=ALERT_TYPE_CHOICES, default="availability"
|
||||||
|
)
|
||||||
message = models.TextField(null=True, blank=True)
|
message = models.TextField(null=True, blank=True)
|
||||||
alert_time = models.DateTimeField(auto_now_add=True, null=True)
|
alert_time = models.DateTimeField(auto_now_add=True, null=True, blank=True)
|
||||||
|
snoozed = models.BooleanField(default=False)
|
||||||
snooze_until = models.DateTimeField(null=True, blank=True)
|
snooze_until = models.DateTimeField(null=True, blank=True)
|
||||||
resolved = models.BooleanField(default=False)
|
resolved = models.BooleanField(default=False)
|
||||||
severity = models.CharField(
|
resolved_on = models.DateTimeField(null=True, blank=True)
|
||||||
max_length=100, choices=SEVERITY_CHOICES, default="info"
|
severity = models.CharField(max_length=30, choices=SEVERITY_CHOICES, default="info")
|
||||||
|
email_sent = models.DateTimeField(null=True, blank=True)
|
||||||
|
resolved_email_sent = models.DateTimeField(null=True, blank=True)
|
||||||
|
sms_sent = models.DateTimeField(null=True, blank=True)
|
||||||
|
resolved_sms_sent = models.DateTimeField(null=True, blank=True)
|
||||||
|
hidden = models.BooleanField(default=False)
|
||||||
|
action_run = models.DateTimeField(null=True, blank=True)
|
||||||
|
action_stdout = models.TextField(null=True, blank=True)
|
||||||
|
action_stderr = models.TextField(null=True, blank=True)
|
||||||
|
action_retcode = models.IntegerField(null=True, blank=True)
|
||||||
|
action_execution_time = models.CharField(max_length=100, null=True, blank=True)
|
||||||
|
resolved_action_run = models.DateTimeField(null=True, blank=True)
|
||||||
|
resolved_action_stdout = models.TextField(null=True, blank=True)
|
||||||
|
resolved_action_stderr = models.TextField(null=True, blank=True)
|
||||||
|
resolved_action_retcode = models.IntegerField(null=True, blank=True)
|
||||||
|
resolved_action_execution_time = models.CharField(
|
||||||
|
max_length=100, null=True, blank=True
|
||||||
)
|
)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.message
|
return self.message
|
||||||
|
|
||||||
@classmethod
|
def resolve(self):
|
||||||
def create_availability_alert(cls, agent):
|
self.resolved = True
|
||||||
pass
|
self.resolved_on = djangotime.now()
|
||||||
|
self.snoozed = False
|
||||||
|
self.snooze_until = None
|
||||||
|
self.save()
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create_check_alert(cls, check):
|
def create_or_return_availability_alert(cls, agent):
|
||||||
pass
|
if not cls.objects.filter(agent=agent, resolved=False).exists():
|
||||||
|
return cls.objects.create(
|
||||||
|
agent=agent,
|
||||||
|
alert_type="availability",
|
||||||
|
severity="error",
|
||||||
|
message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.",
|
||||||
|
hidden=True,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return cls.objects.get(agent=agent, resolved=False)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def create_or_return_check_alert(cls, check):
|
||||||
|
|
||||||
|
if not cls.objects.filter(assigned_check=check, resolved=False).exists():
|
||||||
|
return cls.objects.create(
|
||||||
|
assigned_check=check,
|
||||||
|
alert_type="check",
|
||||||
|
severity=check.alert_severity,
|
||||||
|
message=f"{check.agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
|
||||||
|
hidden=True,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return cls.objects.get(assigned_check=check, resolved=False)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def create_or_return_task_alert(cls, task):
|
||||||
|
|
||||||
|
if not cls.objects.filter(assigned_task=task, resolved=False).exists():
|
||||||
|
return cls.objects.create(
|
||||||
|
assigned_task=task,
|
||||||
|
alert_type="task",
|
||||||
|
severity=task.alert_severity,
|
||||||
|
message=f"{task.agent.hostname} has task: {task.name} that failed.",
|
||||||
|
hidden=True,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return cls.objects.get(assigned_task=task, resolved=False)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def handle_alert_failure(cls, instance: Union[Agent, AutomatedTask, Check]) -> None:
|
||||||
|
from agents.models import Agent
|
||||||
|
from autotasks.models import AutomatedTask
|
||||||
|
from checks.models import Check
|
||||||
|
|
||||||
|
# set variables
|
||||||
|
dashboard_severities = None
|
||||||
|
email_severities = None
|
||||||
|
text_severities = None
|
||||||
|
always_dashboard = None
|
||||||
|
always_email = None
|
||||||
|
always_text = None
|
||||||
|
alert_interval = None
|
||||||
|
email_task = None
|
||||||
|
text_task = None
|
||||||
|
|
||||||
|
# check what the instance passed is
|
||||||
|
if isinstance(instance, Agent):
|
||||||
|
from agents.tasks import agent_outage_email_task, agent_outage_sms_task
|
||||||
|
|
||||||
|
email_task = agent_outage_email_task
|
||||||
|
text_task = agent_outage_sms_task
|
||||||
|
|
||||||
|
email_alert = instance.overdue_email_alert
|
||||||
|
text_alert = instance.overdue_text_alert
|
||||||
|
dashboard_alert = instance.overdue_dashboard_alert
|
||||||
|
alert_template = instance.get_alert_template()
|
||||||
|
maintenance_mode = instance.maintenance_mode
|
||||||
|
alert_severity = "error"
|
||||||
|
agent = instance
|
||||||
|
|
||||||
|
# set alert_template settings
|
||||||
|
if alert_template:
|
||||||
|
dashboard_severities = ["error"]
|
||||||
|
email_severities = ["error"]
|
||||||
|
text_severities = ["error"]
|
||||||
|
always_dashboard = alert_template.agent_always_alert
|
||||||
|
always_email = alert_template.agent_always_email
|
||||||
|
always_text = alert_template.agent_always_text
|
||||||
|
alert_interval = alert_template.agent_periodic_alert_days
|
||||||
|
|
||||||
|
if instance.should_create_alert(alert_template):
|
||||||
|
alert = cls.create_or_return_availability_alert(instance)
|
||||||
|
else:
|
||||||
|
# check if there is an alert that exists
|
||||||
|
if cls.objects.filter(agent=instance, resolved=False).exists():
|
||||||
|
alert = cls.objects.get(agent=instance, resolved=False)
|
||||||
|
else:
|
||||||
|
alert = None
|
||||||
|
|
||||||
|
elif isinstance(instance, Check):
|
||||||
|
from checks.tasks import (
|
||||||
|
handle_check_email_alert_task,
|
||||||
|
handle_check_sms_alert_task,
|
||||||
|
)
|
||||||
|
|
||||||
|
email_task = handle_check_email_alert_task
|
||||||
|
text_task = handle_check_sms_alert_task
|
||||||
|
|
||||||
|
email_alert = instance.email_alert
|
||||||
|
text_alert = instance.text_alert
|
||||||
|
dashboard_alert = instance.dashboard_alert
|
||||||
|
alert_template = instance.agent.get_alert_template()
|
||||||
|
maintenance_mode = instance.agent.maintenance_mode
|
||||||
|
alert_severity = instance.alert_severity
|
||||||
|
agent = instance.agent
|
||||||
|
|
||||||
|
# set alert_template settings
|
||||||
|
if alert_template:
|
||||||
|
dashboard_severities = alert_template.check_dashboard_alert_severity
|
||||||
|
email_severities = alert_template.check_email_alert_severity
|
||||||
|
text_severities = alert_template.check_text_alert_severity
|
||||||
|
always_dashboard = alert_template.check_always_alert
|
||||||
|
always_email = alert_template.check_always_email
|
||||||
|
always_text = alert_template.check_always_text
|
||||||
|
alert_interval = alert_template.check_periodic_alert_days
|
||||||
|
|
||||||
|
if instance.should_create_alert(alert_template):
|
||||||
|
alert = cls.create_or_return_check_alert(instance)
|
||||||
|
else:
|
||||||
|
# check if there is an alert that exists
|
||||||
|
if cls.objects.filter(assigned_check=instance, resolved=False).exists():
|
||||||
|
alert = cls.objects.get(assigned_check=instance, resolved=False)
|
||||||
|
else:
|
||||||
|
alert = None
|
||||||
|
|
||||||
|
elif isinstance(instance, AutomatedTask):
|
||||||
|
from autotasks.tasks import handle_task_email_alert, handle_task_sms_alert
|
||||||
|
|
||||||
|
email_task = handle_task_email_alert
|
||||||
|
text_task = handle_task_sms_alert
|
||||||
|
|
||||||
|
email_alert = instance.email_alert
|
||||||
|
text_alert = instance.text_alert
|
||||||
|
dashboard_alert = instance.dashboard_alert
|
||||||
|
alert_template = instance.agent.get_alert_template()
|
||||||
|
maintenance_mode = instance.agent.maintenance_mode
|
||||||
|
alert_severity = instance.alert_severity
|
||||||
|
agent = instance.agent
|
||||||
|
|
||||||
|
# set alert_template settings
|
||||||
|
if alert_template:
|
||||||
|
dashboard_severities = alert_template.task_dashboard_alert_severity
|
||||||
|
email_severities = alert_template.task_email_alert_severity
|
||||||
|
text_severities = alert_template.task_text_alert_severity
|
||||||
|
always_dashboard = alert_template.task_always_alert
|
||||||
|
always_email = alert_template.task_always_email
|
||||||
|
always_text = alert_template.task_always_text
|
||||||
|
alert_interval = alert_template.task_periodic_alert_days
|
||||||
|
|
||||||
|
if instance.should_create_alert(alert_template):
|
||||||
|
alert = cls.create_or_return_task_alert(instance)
|
||||||
|
else:
|
||||||
|
# check if there is an alert that exists
|
||||||
|
if cls.objects.filter(assigned_task=instance, resolved=False).exists():
|
||||||
|
alert = cls.objects.get(assigned_task=instance, resolved=False)
|
||||||
|
else:
|
||||||
|
alert = None
|
||||||
|
else:
|
||||||
|
return
|
||||||
|
|
||||||
|
# return if agent is in maintenance mode
|
||||||
|
if maintenance_mode or not alert:
|
||||||
|
return
|
||||||
|
|
||||||
|
# check if alert severity changed on check and update the alert
|
||||||
|
if alert_severity != alert.severity:
|
||||||
|
alert.severity = alert_severity
|
||||||
|
alert.save(update_fields=["severity"])
|
||||||
|
|
||||||
|
# create alert in dashboard if enabled
|
||||||
|
if dashboard_alert or always_dashboard:
|
||||||
|
|
||||||
|
# check if alert template is set and specific severities are configured
|
||||||
|
if alert_template and alert.severity not in dashboard_severities: # type: ignore
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
alert.hidden = False
|
||||||
|
alert.save()
|
||||||
|
|
||||||
|
# send email if enabled
|
||||||
|
if email_alert or always_email:
|
||||||
|
|
||||||
|
# check if alert template is set and specific severities are configured
|
||||||
|
if alert_template and alert.severity not in email_severities: # type: ignore
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
email_task.delay(
|
||||||
|
pk=alert.pk,
|
||||||
|
alert_interval=alert_interval,
|
||||||
|
)
|
||||||
|
|
||||||
|
# send text if enabled
|
||||||
|
if text_alert or always_text:
|
||||||
|
|
||||||
|
# check if alert template is set and specific severities are configured
|
||||||
|
if alert_template and alert.severity not in text_severities: # type: ignore
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
|
||||||
|
|
||||||
|
# check if any scripts should be run
|
||||||
|
if alert_template and alert_template.action and not alert.action_run:
|
||||||
|
r = agent.run_script(
|
||||||
|
scriptpk=alert_template.action.pk,
|
||||||
|
args=alert_template.action_args,
|
||||||
|
timeout=alert_template.action_timeout,
|
||||||
|
wait=True,
|
||||||
|
full=True,
|
||||||
|
run_on_any=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# command was successful
|
||||||
|
if type(r) == dict:
|
||||||
|
alert.action_retcode = r["retcode"]
|
||||||
|
alert.action_stdout = r["stdout"]
|
||||||
|
alert.action_stderr = r["stderr"]
|
||||||
|
alert.action_execution_time = "{:.4f}".format(r["execution_time"])
|
||||||
|
alert.action_run = djangotime.now()
|
||||||
|
alert.save()
|
||||||
|
else:
|
||||||
|
logger.error(
|
||||||
|
f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname} failure alert"
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def handle_alert_resolve(cls, instance: Union[Agent, AutomatedTask, Check]) -> None:
|
||||||
|
from agents.models import Agent
|
||||||
|
from autotasks.models import AutomatedTask
|
||||||
|
from checks.models import Check
|
||||||
|
|
||||||
|
# set variables
|
||||||
|
email_on_resolved = False
|
||||||
|
text_on_resolved = False
|
||||||
|
resolved_email_task = None
|
||||||
|
resolved_text_task = None
|
||||||
|
|
||||||
|
# check what the instance passed is
|
||||||
|
if isinstance(instance, Agent):
|
||||||
|
from agents.tasks import agent_recovery_email_task, agent_recovery_sms_task
|
||||||
|
|
||||||
|
resolved_email_task = agent_recovery_email_task
|
||||||
|
resolved_text_task = agent_recovery_sms_task
|
||||||
|
|
||||||
|
alert_template = instance.get_alert_template()
|
||||||
|
alert = cls.objects.get(agent=instance, resolved=False)
|
||||||
|
maintenance_mode = instance.maintenance_mode
|
||||||
|
agent = instance
|
||||||
|
|
||||||
|
if alert_template:
|
||||||
|
email_on_resolved = alert_template.agent_email_on_resolved
|
||||||
|
text_on_resolved = alert_template.agent_text_on_resolved
|
||||||
|
|
||||||
|
elif isinstance(instance, Check):
|
||||||
|
from checks.tasks import (
|
||||||
|
handle_resolved_check_email_alert_task,
|
||||||
|
handle_resolved_check_sms_alert_task,
|
||||||
|
)
|
||||||
|
|
||||||
|
resolved_email_task = handle_resolved_check_email_alert_task
|
||||||
|
resolved_text_task = handle_resolved_check_sms_alert_task
|
||||||
|
|
||||||
|
alert_template = instance.agent.get_alert_template()
|
||||||
|
alert = cls.objects.get(assigned_check=instance, resolved=False)
|
||||||
|
maintenance_mode = instance.agent.maintenance_mode
|
||||||
|
agent = instance.agent
|
||||||
|
|
||||||
|
if alert_template:
|
||||||
|
email_on_resolved = alert_template.check_email_on_resolved
|
||||||
|
text_on_resolved = alert_template.check_text_on_resolved
|
||||||
|
|
||||||
|
elif isinstance(instance, AutomatedTask):
|
||||||
|
from autotasks.tasks import (
|
||||||
|
handle_resolved_task_email_alert,
|
||||||
|
handle_resolved_task_sms_alert,
|
||||||
|
)
|
||||||
|
|
||||||
|
resolved_email_task = handle_resolved_task_email_alert
|
||||||
|
resolved_text_task = handle_resolved_task_sms_alert
|
||||||
|
|
||||||
|
alert_template = instance.agent.get_alert_template()
|
||||||
|
alert = cls.objects.get(assigned_task=instance, resolved=False)
|
||||||
|
maintenance_mode = instance.agent.maintenance_mode
|
||||||
|
agent = instance.agent
|
||||||
|
|
||||||
|
if alert_template:
|
||||||
|
email_on_resolved = alert_template.task_email_on_resolved
|
||||||
|
text_on_resolved = alert_template.task_text_on_resolved
|
||||||
|
|
||||||
|
else:
|
||||||
|
return
|
||||||
|
|
||||||
|
# return if agent is in maintenance mode
|
||||||
|
if maintenance_mode:
|
||||||
|
return
|
||||||
|
|
||||||
|
alert.resolve()
|
||||||
|
|
||||||
|
# check if a resolved email notification should be send
|
||||||
|
if email_on_resolved and not alert.resolved_email_sent:
|
||||||
|
resolved_email_task.delay(pk=alert.pk)
|
||||||
|
|
||||||
|
# check if resolved text should be sent
|
||||||
|
if text_on_resolved and not alert.resolved_sms_sent:
|
||||||
|
resolved_text_task.delay(pk=alert.pk)
|
||||||
|
|
||||||
|
# check if resolved script should be run
|
||||||
|
if (
|
||||||
|
alert_template
|
||||||
|
and alert_template.resolved_action
|
||||||
|
and not alert.resolved_action_run
|
||||||
|
):
|
||||||
|
r = agent.run_script(
|
||||||
|
scriptpk=alert_template.resolved_action.pk,
|
||||||
|
args=alert_template.resolved_action_args,
|
||||||
|
timeout=alert_template.resolved_action_timeout,
|
||||||
|
wait=True,
|
||||||
|
full=True,
|
||||||
|
run_on_any=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# command was successful
|
||||||
|
if type(r) == dict:
|
||||||
|
alert.resolved_action_retcode = r["retcode"]
|
||||||
|
alert.resolved_action_stdout = r["stdout"]
|
||||||
|
alert.resolved_action_stderr = r["stderr"]
|
||||||
|
alert.resolved_action_execution_time = "{:.4f}".format(
|
||||||
|
r["execution_time"]
|
||||||
|
)
|
||||||
|
alert.resolved_action_run = djangotime.now()
|
||||||
|
alert.save()
|
||||||
|
else:
|
||||||
|
logger.error(
|
||||||
|
f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname} resolved alert"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class AlertTemplate(models.Model):
|
||||||
|
name = models.CharField(max_length=100)
|
||||||
|
is_active = models.BooleanField(default=True)
|
||||||
|
|
||||||
|
action = models.ForeignKey(
|
||||||
|
"scripts.Script",
|
||||||
|
related_name="alert_template",
|
||||||
|
blank=True,
|
||||||
|
null=True,
|
||||||
|
on_delete=models.SET_NULL,
|
||||||
|
)
|
||||||
|
action_args = ArrayField(
|
||||||
|
models.CharField(max_length=255, null=True, blank=True),
|
||||||
|
null=True,
|
||||||
|
blank=True,
|
||||||
|
default=list,
|
||||||
|
)
|
||||||
|
action_timeout = models.PositiveIntegerField(default=15)
|
||||||
|
resolved_action = models.ForeignKey(
|
||||||
|
"scripts.Script",
|
||||||
|
related_name="resolved_alert_template",
|
||||||
|
blank=True,
|
||||||
|
null=True,
|
||||||
|
on_delete=models.SET_NULL,
|
||||||
|
)
|
||||||
|
resolved_action_args = ArrayField(
|
||||||
|
models.CharField(max_length=255, null=True, blank=True),
|
||||||
|
null=True,
|
||||||
|
blank=True,
|
||||||
|
default=list,
|
||||||
|
)
|
||||||
|
resolved_action_timeout = models.PositiveIntegerField(default=15)
|
||||||
|
|
||||||
|
# overrides the global recipients
|
||||||
|
email_recipients = ArrayField(
|
||||||
|
models.CharField(max_length=100, blank=True),
|
||||||
|
null=True,
|
||||||
|
blank=True,
|
||||||
|
default=list,
|
||||||
|
)
|
||||||
|
text_recipients = ArrayField(
|
||||||
|
models.CharField(max_length=100, blank=True),
|
||||||
|
null=True,
|
||||||
|
blank=True,
|
||||||
|
default=list,
|
||||||
|
)
|
||||||
|
|
||||||
|
# overrides the from address
|
||||||
|
email_from = models.EmailField(blank=True, null=True)
|
||||||
|
|
||||||
|
# agent alert settings
|
||||||
|
agent_email_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||||
|
agent_text_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||||
|
agent_always_email = BooleanField(null=True, blank=True, default=None)
|
||||||
|
agent_always_text = BooleanField(null=True, blank=True, default=None)
|
||||||
|
agent_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||||
|
agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||||
|
|
||||||
|
# check alert settings
|
||||||
|
check_email_alert_severity = ArrayField(
|
||||||
|
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||||
|
blank=True,
|
||||||
|
default=list,
|
||||||
|
)
|
||||||
|
check_text_alert_severity = ArrayField(
|
||||||
|
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||||
|
blank=True,
|
||||||
|
default=list,
|
||||||
|
)
|
||||||
|
check_dashboard_alert_severity = ArrayField(
|
||||||
|
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||||
|
blank=True,
|
||||||
|
default=list,
|
||||||
|
)
|
||||||
|
check_email_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||||
|
check_text_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||||
|
check_always_email = BooleanField(null=True, blank=True, default=None)
|
||||||
|
check_always_text = BooleanField(null=True, blank=True, default=None)
|
||||||
|
check_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||||
|
check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||||
|
|
||||||
|
# task alert settings
|
||||||
|
task_email_alert_severity = ArrayField(
|
||||||
|
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||||
|
blank=True,
|
||||||
|
default=list,
|
||||||
|
)
|
||||||
|
task_text_alert_severity = ArrayField(
|
||||||
|
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||||
|
blank=True,
|
||||||
|
default=list,
|
||||||
|
)
|
||||||
|
task_dashboard_alert_severity = ArrayField(
|
||||||
|
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||||
|
blank=True,
|
||||||
|
default=list,
|
||||||
|
)
|
||||||
|
task_email_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||||
|
task_text_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||||
|
task_always_email = BooleanField(null=True, blank=True, default=None)
|
||||||
|
task_always_text = BooleanField(null=True, blank=True, default=None)
|
||||||
|
task_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||||
|
task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||||
|
|
||||||
|
# exclusion settings
|
||||||
|
exclude_workstations = BooleanField(null=True, blank=True, default=False)
|
||||||
|
exclude_servers = BooleanField(null=True, blank=True, default=False)
|
||||||
|
|
||||||
|
excluded_sites = models.ManyToManyField(
|
||||||
|
"clients.Site", related_name="alert_exclusions", blank=True
|
||||||
|
)
|
||||||
|
excluded_clients = models.ManyToManyField(
|
||||||
|
"clients.Client", related_name="alert_exclusions", blank=True
|
||||||
|
)
|
||||||
|
excluded_agents = models.ManyToManyField(
|
||||||
|
"agents.Agent", related_name="alert_exclusions", blank=True
|
||||||
|
)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.name
|
||||||
|
|
||||||
|
@property
|
||||||
|
def has_agent_settings(self) -> bool:
|
||||||
|
return (
|
||||||
|
self.agent_email_on_resolved
|
||||||
|
or self.agent_text_on_resolved
|
||||||
|
or self.agent_always_email
|
||||||
|
or self.agent_always_text
|
||||||
|
or self.agent_always_alert
|
||||||
|
or bool(self.agent_periodic_alert_days)
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def has_check_settings(self) -> bool:
|
||||||
|
return (
|
||||||
|
bool(self.check_email_alert_severity)
|
||||||
|
or bool(self.check_text_alert_severity)
|
||||||
|
or bool(self.check_dashboard_alert_severity)
|
||||||
|
or self.check_email_on_resolved
|
||||||
|
or self.check_text_on_resolved
|
||||||
|
or self.check_always_email
|
||||||
|
or self.check_always_text
|
||||||
|
or self.check_always_alert
|
||||||
|
or bool(self.check_periodic_alert_days)
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def has_task_settings(self) -> bool:
|
||||||
|
return (
|
||||||
|
bool(self.task_email_alert_severity)
|
||||||
|
or bool(self.task_text_alert_severity)
|
||||||
|
or bool(self.task_dashboard_alert_severity)
|
||||||
|
or self.task_email_on_resolved
|
||||||
|
or self.task_text_on_resolved
|
||||||
|
or self.task_always_email
|
||||||
|
or self.task_always_text
|
||||||
|
or self.task_always_alert
|
||||||
|
or bool(self.task_periodic_alert_days)
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def has_core_settings(self) -> bool:
|
||||||
|
return bool(self.email_from) or self.email_recipients or self.text_recipients
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_default_template(self) -> bool:
|
||||||
|
return self.default_alert_template.exists() # type: ignore
|
||||||
|
|||||||
@@ -1,19 +1,121 @@
|
|||||||
from rest_framework.serializers import (
|
from rest_framework.fields import SerializerMethodField
|
||||||
ModelSerializer,
|
from rest_framework.serializers import ModelSerializer, ReadOnlyField
|
||||||
ReadOnlyField,
|
|
||||||
DateTimeField,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .models import Alert
|
from automation.serializers import PolicySerializer
|
||||||
|
from clients.serializers import ClientSerializer, SiteSerializer
|
||||||
|
from tacticalrmm.utils import get_default_timezone
|
||||||
|
|
||||||
|
from .models import Alert, AlertTemplate
|
||||||
|
|
||||||
|
|
||||||
class AlertSerializer(ModelSerializer):
|
class AlertSerializer(ModelSerializer):
|
||||||
|
|
||||||
hostname = ReadOnlyField(source="agent.hostname")
|
hostname = SerializerMethodField(read_only=True)
|
||||||
client = ReadOnlyField(source="agent.client")
|
client = SerializerMethodField(read_only=True)
|
||||||
site = ReadOnlyField(source="agent.site")
|
site = SerializerMethodField(read_only=True)
|
||||||
alert_time = DateTimeField(format="iso-8601")
|
alert_time = SerializerMethodField(read_only=True)
|
||||||
|
resolve_on = SerializerMethodField(read_only=True)
|
||||||
|
snoozed_until = SerializerMethodField(read_only=True)
|
||||||
|
|
||||||
|
def get_hostname(self, instance):
|
||||||
|
if instance.alert_type == "availability":
|
||||||
|
return instance.agent.hostname if instance.agent else ""
|
||||||
|
elif instance.alert_type == "check":
|
||||||
|
return (
|
||||||
|
instance.assigned_check.agent.hostname
|
||||||
|
if instance.assigned_check
|
||||||
|
else ""
|
||||||
|
)
|
||||||
|
elif instance.alert_type == "task":
|
||||||
|
return (
|
||||||
|
instance.assigned_task.agent.hostname if instance.assigned_task else ""
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
def get_client(self, instance):
|
||||||
|
if instance.alert_type == "availability":
|
||||||
|
return instance.agent.client.name if instance.agent else ""
|
||||||
|
elif instance.alert_type == "check":
|
||||||
|
return (
|
||||||
|
instance.assigned_check.agent.client.name
|
||||||
|
if instance.assigned_check
|
||||||
|
else ""
|
||||||
|
)
|
||||||
|
elif instance.alert_type == "task":
|
||||||
|
return (
|
||||||
|
instance.assigned_task.agent.client.name
|
||||||
|
if instance.assigned_task
|
||||||
|
else ""
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
def get_site(self, instance):
|
||||||
|
if instance.alert_type == "availability":
|
||||||
|
return instance.agent.site.name if instance.agent else ""
|
||||||
|
elif instance.alert_type == "check":
|
||||||
|
return (
|
||||||
|
instance.assigned_check.agent.site.name
|
||||||
|
if instance.assigned_check
|
||||||
|
else ""
|
||||||
|
)
|
||||||
|
elif instance.alert_type == "task":
|
||||||
|
return (
|
||||||
|
instance.assigned_task.agent.site.name if instance.assigned_task else ""
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
def get_alert_time(self, instance):
|
||||||
|
if instance.alert_time:
|
||||||
|
return instance.alert_time.astimezone(get_default_timezone()).timestamp()
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_resolve_on(self, instance):
|
||||||
|
if instance.resolved_on:
|
||||||
|
return instance.resolved_on.astimezone(get_default_timezone()).timestamp()
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_snoozed_until(self, instance):
|
||||||
|
if instance.snooze_until:
|
||||||
|
return instance.snooze_until.astimezone(get_default_timezone()).timestamp()
|
||||||
|
return None
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Alert
|
model = Alert
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class AlertTemplateSerializer(ModelSerializer):
|
||||||
|
agent_settings = ReadOnlyField(source="has_agent_settings")
|
||||||
|
check_settings = ReadOnlyField(source="has_check_settings")
|
||||||
|
task_settings = ReadOnlyField(source="has_task_settings")
|
||||||
|
core_settings = ReadOnlyField(source="has_core_settings")
|
||||||
|
default_template = ReadOnlyField(source="is_default_template")
|
||||||
|
action_name = ReadOnlyField(source="action.name")
|
||||||
|
resolved_action_name = ReadOnlyField(source="resolved_action.name")
|
||||||
|
applied_count = SerializerMethodField()
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = AlertTemplate
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
def get_applied_count(self, instance):
|
||||||
|
count = 0
|
||||||
|
count += instance.policies.count()
|
||||||
|
count += instance.clients.count()
|
||||||
|
count += instance.sites.count()
|
||||||
|
return count
|
||||||
|
|
||||||
|
|
||||||
|
class AlertTemplateRelationSerializer(ModelSerializer):
|
||||||
|
policies = PolicySerializer(read_only=True, many=True)
|
||||||
|
clients = ClientSerializer(read_only=True, many=True)
|
||||||
|
sites = SiteSerializer(read_only=True, many=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = AlertTemplate
|
||||||
|
fields = "__all__"
|
||||||
|
|||||||
14
api/tacticalrmm/alerts/tasks.py
Normal file
14
api/tacticalrmm/alerts/tasks.py
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
from django.utils import timezone as djangotime
|
||||||
|
|
||||||
|
from alerts.models import Alert
|
||||||
|
from tacticalrmm.celery import app
|
||||||
|
|
||||||
|
|
||||||
|
@app.task
|
||||||
|
def unsnooze_alerts() -> str:
|
||||||
|
|
||||||
|
Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update(
|
||||||
|
snoozed=False, snooze_until=None
|
||||||
|
)
|
||||||
|
|
||||||
|
return "ok"
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,7 +1,12 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from . import views
|
from . import views
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path("alerts/", views.GetAddAlerts.as_view()),
|
path("alerts/", views.GetAddAlerts.as_view()),
|
||||||
|
path("bulk/", views.BulkAlerts.as_view()),
|
||||||
path("alerts/<int:pk>/", views.GetUpdateDeleteAlert.as_view()),
|
path("alerts/<int:pk>/", views.GetUpdateDeleteAlert.as_view()),
|
||||||
|
path("alerttemplates/", views.GetAddAlertTemplates.as_view()),
|
||||||
|
path("alerttemplates/<int:pk>/", views.GetUpdateDeleteAlertTemplate.as_view()),
|
||||||
|
path("alerttemplates/<int:pk>/related/", views.RelatedAlertTemplate.as_view()),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,19 +1,103 @@
|
|||||||
|
from datetime import datetime as dt
|
||||||
|
|
||||||
|
from django.db.models import Q
|
||||||
from django.shortcuts import get_object_or_404
|
from django.shortcuts import get_object_or_404
|
||||||
|
from django.utils import timezone as djangotime
|
||||||
from rest_framework.views import APIView
|
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework import status
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
from .models import Alert
|
from tacticalrmm.utils import notify_error
|
||||||
|
|
||||||
from .serializers import AlertSerializer
|
from .models import Alert, AlertTemplate
|
||||||
|
from .serializers import (
|
||||||
|
AlertSerializer,
|
||||||
|
AlertTemplateRelationSerializer,
|
||||||
|
AlertTemplateSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class GetAddAlerts(APIView):
|
class GetAddAlerts(APIView):
|
||||||
def get(self, request):
|
def patch(self, request):
|
||||||
alerts = Alert.objects.all()
|
|
||||||
|
|
||||||
return Response(AlertSerializer(alerts, many=True).data)
|
# top 10 alerts for dashboard icon
|
||||||
|
if "top" in request.data.keys():
|
||||||
|
alerts = Alert.objects.filter(
|
||||||
|
resolved=False, snoozed=False, hidden=False
|
||||||
|
).order_by("alert_time")[: int(request.data["top"])]
|
||||||
|
count = Alert.objects.filter(
|
||||||
|
resolved=False, snoozed=False, hidden=False
|
||||||
|
).count()
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"alerts_count": count,
|
||||||
|
"alerts": AlertSerializer(alerts, many=True).data,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
elif any(
|
||||||
|
key
|
||||||
|
in [
|
||||||
|
"timeFilter",
|
||||||
|
"clientFilter",
|
||||||
|
"severityFilter",
|
||||||
|
"resolvedFilter",
|
||||||
|
"snoozedFilter",
|
||||||
|
]
|
||||||
|
for key in request.data.keys()
|
||||||
|
):
|
||||||
|
clientFilter = Q()
|
||||||
|
severityFilter = Q()
|
||||||
|
timeFilter = Q()
|
||||||
|
resolvedFilter = Q()
|
||||||
|
snoozedFilter = Q()
|
||||||
|
|
||||||
|
if (
|
||||||
|
"snoozedFilter" in request.data.keys()
|
||||||
|
and not request.data["snoozedFilter"]
|
||||||
|
):
|
||||||
|
snoozedFilter = Q(snoozed=request.data["snoozedFilter"])
|
||||||
|
|
||||||
|
if (
|
||||||
|
"resolvedFilter" in request.data.keys()
|
||||||
|
and not request.data["resolvedFilter"]
|
||||||
|
):
|
||||||
|
resolvedFilter = Q(resolved=request.data["resolvedFilter"])
|
||||||
|
|
||||||
|
if "clientFilter" in request.data.keys():
|
||||||
|
from agents.models import Agent
|
||||||
|
from clients.models import Client
|
||||||
|
|
||||||
|
clients = Client.objects.filter(
|
||||||
|
pk__in=request.data["clientFilter"]
|
||||||
|
).values_list("id")
|
||||||
|
agents = Agent.objects.filter(site__client_id__in=clients).values_list(
|
||||||
|
"id"
|
||||||
|
)
|
||||||
|
|
||||||
|
clientFilter = Q(agent__in=agents)
|
||||||
|
|
||||||
|
if "severityFilter" in request.data.keys():
|
||||||
|
severityFilter = Q(severity__in=request.data["severityFilter"])
|
||||||
|
|
||||||
|
if "timeFilter" in request.data.keys():
|
||||||
|
timeFilter = Q(
|
||||||
|
alert_time__lte=djangotime.make_aware(dt.today()),
|
||||||
|
alert_time__gt=djangotime.make_aware(dt.today())
|
||||||
|
- djangotime.timedelta(days=int(request.data["timeFilter"])),
|
||||||
|
)
|
||||||
|
|
||||||
|
alerts = (
|
||||||
|
Alert.objects.filter(clientFilter)
|
||||||
|
.filter(severityFilter)
|
||||||
|
.filter(resolvedFilter)
|
||||||
|
.filter(snoozedFilter)
|
||||||
|
.filter(timeFilter)
|
||||||
|
)
|
||||||
|
return Response(AlertSerializer(alerts, many=True).data)
|
||||||
|
|
||||||
|
else:
|
||||||
|
alerts = Alert.objects.all()
|
||||||
|
return Response(AlertSerializer(alerts, many=True).data)
|
||||||
|
|
||||||
def post(self, request):
|
def post(self, request):
|
||||||
serializer = AlertSerializer(data=request.data, partial=True)
|
serializer = AlertSerializer(data=request.data, partial=True)
|
||||||
@@ -32,7 +116,40 @@ class GetUpdateDeleteAlert(APIView):
|
|||||||
def put(self, request, pk):
|
def put(self, request, pk):
|
||||||
alert = get_object_or_404(Alert, pk=pk)
|
alert = get_object_or_404(Alert, pk=pk)
|
||||||
|
|
||||||
serializer = AlertSerializer(instance=alert, data=request.data, partial=True)
|
data = request.data
|
||||||
|
|
||||||
|
if "type" in data.keys():
|
||||||
|
if data["type"] == "resolve":
|
||||||
|
data = {
|
||||||
|
"resolved": True,
|
||||||
|
"resolved_on": djangotime.now(),
|
||||||
|
"snoozed": False,
|
||||||
|
}
|
||||||
|
|
||||||
|
# unable to set snooze_until to none in serialzier
|
||||||
|
alert.snooze_until = None
|
||||||
|
alert.save()
|
||||||
|
elif data["type"] == "snooze":
|
||||||
|
if "snooze_days" in data.keys():
|
||||||
|
data = {
|
||||||
|
"snoozed": True,
|
||||||
|
"snooze_until": djangotime.now()
|
||||||
|
+ djangotime.timedelta(days=int(data["snooze_days"])),
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
return notify_error(
|
||||||
|
"Missing 'snoozed_days' when trying to snooze alert"
|
||||||
|
)
|
||||||
|
elif data["type"] == "unsnooze":
|
||||||
|
data = {"snoozed": False}
|
||||||
|
|
||||||
|
# unable to set snooze_until to none in serialzier
|
||||||
|
alert.snooze_until = None
|
||||||
|
alert.save()
|
||||||
|
else:
|
||||||
|
return notify_error("There was an error in the request data")
|
||||||
|
|
||||||
|
serializer = AlertSerializer(instance=alert, data=data, partial=True)
|
||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
serializer.save()
|
serializer.save()
|
||||||
|
|
||||||
@@ -42,3 +159,68 @@ class GetUpdateDeleteAlert(APIView):
|
|||||||
Alert.objects.get(pk=pk).delete()
|
Alert.objects.get(pk=pk).delete()
|
||||||
|
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
|
class BulkAlerts(APIView):
|
||||||
|
def post(self, request):
|
||||||
|
if request.data["bulk_action"] == "resolve":
|
||||||
|
Alert.objects.filter(id__in=request.data["alerts"]).update(
|
||||||
|
resolved=True,
|
||||||
|
resolved_on=djangotime.now(),
|
||||||
|
snoozed=False,
|
||||||
|
snooze_until=None,
|
||||||
|
)
|
||||||
|
return Response("ok")
|
||||||
|
elif request.data["bulk_action"] == "snooze":
|
||||||
|
if "snooze_days" in request.data.keys():
|
||||||
|
Alert.objects.filter(id__in=request.data["alerts"]).update(
|
||||||
|
snoozed=True,
|
||||||
|
snooze_until=djangotime.now()
|
||||||
|
+ djangotime.timedelta(days=int(request.data["snooze_days"])),
|
||||||
|
)
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
return notify_error("The request was invalid")
|
||||||
|
|
||||||
|
|
||||||
|
class GetAddAlertTemplates(APIView):
|
||||||
|
def get(self, request):
|
||||||
|
alert_templates = AlertTemplate.objects.all()
|
||||||
|
|
||||||
|
return Response(AlertTemplateSerializer(alert_templates, many=True).data)
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
serializer = AlertTemplateSerializer(data=request.data, partial=True)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save()
|
||||||
|
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
|
class GetUpdateDeleteAlertTemplate(APIView):
|
||||||
|
def get(self, request, pk):
|
||||||
|
alert_template = get_object_or_404(AlertTemplate, pk=pk)
|
||||||
|
|
||||||
|
return Response(AlertTemplateSerializer(alert_template).data)
|
||||||
|
|
||||||
|
def put(self, request, pk):
|
||||||
|
alert_template = get_object_or_404(AlertTemplate, pk=pk)
|
||||||
|
|
||||||
|
serializer = AlertTemplateSerializer(
|
||||||
|
instance=alert_template, data=request.data, partial=True
|
||||||
|
)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save()
|
||||||
|
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
def delete(self, request, pk):
|
||||||
|
get_object_or_404(AlertTemplate, pk=pk).delete()
|
||||||
|
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
|
class RelatedAlertTemplate(APIView):
|
||||||
|
def get(self, request, pk):
|
||||||
|
alert_template = get_object_or_404(AlertTemplate, pk=pk)
|
||||||
|
return Response(AlertTemplateRelationSerializer(alert_template).data)
|
||||||
|
|||||||
@@ -1,5 +0,0 @@
|
|||||||
from django.apps import AppConfig
|
|
||||||
|
|
||||||
|
|
||||||
class Apiv2Config(AppConfig):
|
|
||||||
name = "apiv2"
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
from tacticalrmm.test import TacticalTestCase
|
|
||||||
from unittest.mock import patch
|
|
||||||
from model_bakery import baker
|
|
||||||
from itertools import cycle
|
|
||||||
|
|
||||||
|
|
||||||
class TestAPIv2(TacticalTestCase):
|
|
||||||
def setUp(self):
|
|
||||||
self.authenticate()
|
|
||||||
self.setup_coresettings()
|
|
||||||
|
|
||||||
@patch("agents.models.Agent.salt_api_cmd")
|
|
||||||
def test_sync_modules(self, mock_ret):
|
|
||||||
# setup data
|
|
||||||
agent = baker.make_recipe("agents.agent")
|
|
||||||
url = "/api/v2/saltminion/"
|
|
||||||
payload = {"agent_id": agent.agent_id}
|
|
||||||
|
|
||||||
mock_ret.return_value = "error"
|
|
||||||
r = self.client.patch(url, payload, format="json")
|
|
||||||
self.assertEqual(r.status_code, 400)
|
|
||||||
|
|
||||||
mock_ret.return_value = []
|
|
||||||
r = self.client.patch(url, payload, format="json")
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
self.assertEqual(r.data, "Modules are already in sync")
|
|
||||||
|
|
||||||
mock_ret.return_value = ["modules.win_agent"]
|
|
||||||
r = self.client.patch(url, payload, format="json")
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
self.assertEqual(r.data, "Successfully synced salt modules")
|
|
||||||
|
|
||||||
mock_ret.return_value = ["askdjaskdjasd", "modules.win_agent"]
|
|
||||||
r = self.client.patch(url, payload, format="json")
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
self.assertEqual(r.data, "Successfully synced salt modules")
|
|
||||||
|
|
||||||
self.check_not_authenticated("patch", url)
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
from django.urls import path
|
|
||||||
from . import views
|
|
||||||
from apiv3 import views as v3_views
|
|
||||||
|
|
||||||
urlpatterns = [
|
|
||||||
path("newagent/", v3_views.NewAgent.as_view()),
|
|
||||||
path("meshexe/", v3_views.MeshExe.as_view()),
|
|
||||||
path("saltminion/", v3_views.SaltMinion.as_view()),
|
|
||||||
path("<str:agentid>/saltminion/", v3_views.SaltMinion.as_view()),
|
|
||||||
path("sysinfo/", v3_views.SysInfo.as_view()),
|
|
||||||
path("hello/", v3_views.Hello.as_view()),
|
|
||||||
path("checkrunner/", views.CheckRunner.as_view()),
|
|
||||||
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
|
|
||||||
]
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
from django.shortcuts import get_object_or_404
|
|
||||||
from django.utils import timezone as djangotime
|
|
||||||
|
|
||||||
from rest_framework.authentication import TokenAuthentication
|
|
||||||
from rest_framework.permissions import IsAuthenticated
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework.views import APIView
|
|
||||||
|
|
||||||
from agents.models import Agent
|
|
||||||
from checks.models import Check
|
|
||||||
|
|
||||||
from checks.serializers import CheckRunnerGetSerializerV2
|
|
||||||
|
|
||||||
|
|
||||||
class CheckRunner(APIView):
|
|
||||||
"""
|
|
||||||
For the windows python agent
|
|
||||||
"""
|
|
||||||
|
|
||||||
authentication_classes = [TokenAuthentication]
|
|
||||||
permission_classes = [IsAuthenticated]
|
|
||||||
|
|
||||||
def get(self, request, agentid):
|
|
||||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
|
||||||
agent.last_seen = djangotime.now()
|
|
||||||
agent.save(update_fields=["last_seen"])
|
|
||||||
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
|
|
||||||
|
|
||||||
ret = {
|
|
||||||
"agent": agent.pk,
|
|
||||||
"check_interval": agent.check_interval,
|
|
||||||
"checks": CheckRunnerGetSerializerV2(checks, many=True).data,
|
|
||||||
}
|
|
||||||
return Response(ret)
|
|
||||||
|
|
||||||
def patch(self, request):
|
|
||||||
check = get_object_or_404(Check, pk=request.data["id"])
|
|
||||||
check.last_run = djangotime.now()
|
|
||||||
check.save(update_fields=["last_run"])
|
|
||||||
status = check.handle_checkv2(request.data)
|
|
||||||
return Response(status)
|
|
||||||
@@ -1,11 +1,12 @@
|
|||||||
import os
|
|
||||||
import json
|
import json
|
||||||
|
import os
|
||||||
|
from itertools import cycle
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from tacticalrmm.test import TacticalTestCase
|
|
||||||
from unittest.mock import patch
|
|
||||||
from model_bakery import baker
|
from model_bakery import baker
|
||||||
from itertools import cycle
|
|
||||||
|
from tacticalrmm.test import TacticalTestCase
|
||||||
|
|
||||||
|
|
||||||
class TestAPIv3(TacticalTestCase):
|
class TestAPIv3(TacticalTestCase):
|
||||||
@@ -26,38 +27,6 @@ class TestAPIv3(TacticalTestCase):
|
|||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
self.check_not_authenticated("get", url)
|
||||||
|
|
||||||
def test_get_salt_minion(self):
|
|
||||||
url = f"/api/v3/{self.agent.agent_id}/saltminion/"
|
|
||||||
url2 = f"/api/v2/{self.agent.agent_id}/saltminion/"
|
|
||||||
|
|
||||||
r = self.client.get(url)
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
self.assertIn("latestVer", r.json().keys())
|
|
||||||
self.assertIn("currentVer", r.json().keys())
|
|
||||||
self.assertIn("salt_id", r.json().keys())
|
|
||||||
self.assertIn("downloadURL", r.json().keys())
|
|
||||||
|
|
||||||
r2 = self.client.get(url2)
|
|
||||||
self.assertEqual(r2.status_code, 200)
|
|
||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
|
||||||
self.check_not_authenticated("get", url2)
|
|
||||||
|
|
||||||
def test_get_mesh_info(self):
|
|
||||||
url = f"/api/v3/{self.agent.pk}/meshinfo/"
|
|
||||||
|
|
||||||
r = self.client.get(url)
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
|
||||||
|
|
||||||
def test_get_winupdater(self):
|
|
||||||
url = f"/api/v3/{self.agent.agent_id}/winupdater/"
|
|
||||||
r = self.client.get(url)
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
|
||||||
|
|
||||||
def test_sysinfo(self):
|
def test_sysinfo(self):
|
||||||
# TODO replace this with golang wmi sample data
|
# TODO replace this with golang wmi sample data
|
||||||
|
|
||||||
@@ -76,28 +45,47 @@ class TestAPIv3(TacticalTestCase):
|
|||||||
|
|
||||||
self.check_not_authenticated("patch", url)
|
self.check_not_authenticated("patch", url)
|
||||||
|
|
||||||
def test_hello_patch(self):
|
def test_checkrunner_interval(self):
|
||||||
url = "/api/v3/hello/"
|
url = f"/api/v3/{self.agent.agent_id}/checkinterval/"
|
||||||
payload = {
|
|
||||||
"agent_id": self.agent.agent_id,
|
|
||||||
"logged_in_username": "None",
|
|
||||||
"disks": [],
|
|
||||||
}
|
|
||||||
|
|
||||||
r = self.client.patch(url, payload, format="json")
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
|
|
||||||
payload["logged_in_username"] = "Bob"
|
|
||||||
r = self.client.patch(url, payload, format="json")
|
|
||||||
self.assertEqual(r.status_code, 200)
|
|
||||||
|
|
||||||
self.check_not_authenticated("patch", url)
|
|
||||||
|
|
||||||
@patch("agents.tasks.install_salt_task.delay")
|
|
||||||
def test_install_salt(self, mock_task):
|
|
||||||
url = f"/api/v3/{self.agent.agent_id}/installsalt/"
|
|
||||||
r = self.client.get(url, format="json")
|
r = self.client.get(url, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
mock_task.assert_called_with(self.agent.pk)
|
self.assertEqual(
|
||||||
|
r.json(),
|
||||||
|
{"agent": self.agent.pk, "check_interval": self.agent.check_interval},
|
||||||
|
)
|
||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
def test_checkin_patch(self):
|
||||||
|
from logs.models import PendingAction
|
||||||
|
|
||||||
|
url = "/api/v3/checkin/"
|
||||||
|
agent_updated = baker.make_recipe("agents.agent", version="1.3.0")
|
||||||
|
PendingAction.objects.create(
|
||||||
|
agent=agent_updated,
|
||||||
|
action_type="agentupdate",
|
||||||
|
details={
|
||||||
|
"url": agent_updated.winagent_dl,
|
||||||
|
"version": agent_updated.version,
|
||||||
|
"inno": agent_updated.win_inno_exe,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
|
||||||
|
self.assertEqual(action.status, "pending")
|
||||||
|
|
||||||
|
# test agent failed to update and still on same version
|
||||||
|
payload = {
|
||||||
|
"func": "hello",
|
||||||
|
"agent_id": agent_updated.agent_id,
|
||||||
|
"version": "1.3.0",
|
||||||
|
}
|
||||||
|
r = self.client.patch(url, payload, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
|
||||||
|
self.assertEqual(action.status, "pending")
|
||||||
|
|
||||||
|
# test agent successful update
|
||||||
|
payload["version"] = settings.LATEST_AGENT_VER
|
||||||
|
r = self.client.patch(url, payload, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
|
||||||
|
self.assertEqual(action.status, "completed")
|
||||||
|
action.delete()
|
||||||
|
|||||||
@@ -1,21 +1,21 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from . import views
|
from . import views
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path("checkin/", views.CheckIn.as_view()),
|
|
||||||
path("hello/", views.Hello.as_view()),
|
|
||||||
path("checkrunner/", views.CheckRunner.as_view()),
|
path("checkrunner/", views.CheckRunner.as_view()),
|
||||||
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
|
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
|
||||||
|
path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()),
|
||||||
path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
|
path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
|
||||||
path("saltminion/", views.SaltMinion.as_view()),
|
|
||||||
path("<str:agentid>/saltminion/", views.SaltMinion.as_view()),
|
|
||||||
path("<int:pk>/meshinfo/", views.MeshInfo.as_view()),
|
|
||||||
path("meshexe/", views.MeshExe.as_view()),
|
path("meshexe/", views.MeshExe.as_view()),
|
||||||
path("sysinfo/", views.SysInfo.as_view()),
|
path("sysinfo/", views.SysInfo.as_view()),
|
||||||
path("newagent/", views.NewAgent.as_view()),
|
path("newagent/", views.NewAgent.as_view()),
|
||||||
path("winupdater/", views.WinUpdater.as_view()),
|
|
||||||
path("<str:agentid>/winupdater/", views.WinUpdater.as_view()),
|
|
||||||
path("software/", views.Software.as_view()),
|
path("software/", views.Software.as_view()),
|
||||||
path("installer/", views.Installer.as_view()),
|
path("installer/", views.Installer.as_view()),
|
||||||
path("<str:agentid>/installsalt/", views.InstallSalt.as_view()),
|
path("checkin/", views.CheckIn.as_view()),
|
||||||
|
path("syncmesh/", views.SyncMeshNodeID.as_view()),
|
||||||
|
path("choco/", views.Choco.as_view()),
|
||||||
|
path("winupdates/", views.WinUpdates.as_view()),
|
||||||
|
path("superseded/", views.SupersededWinUpdate.as_view()),
|
||||||
|
path("<int:pk>/chocoresult/", views.ChocoResult.as_view()),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,91 +1,79 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import os
|
import os
|
||||||
import requests
|
import time
|
||||||
from loguru import logger
|
|
||||||
from packaging import version as pyver
|
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
from django.http import HttpResponse
|
||||||
from django.shortcuts import get_object_or_404
|
from django.shortcuts import get_object_or_404
|
||||||
from django.utils import timezone as djangotime
|
from django.utils import timezone as djangotime
|
||||||
from django.http import HttpResponse
|
from loguru import logger
|
||||||
|
from packaging import version as pyver
|
||||||
|
from rest_framework.authentication import TokenAuthentication
|
||||||
|
from rest_framework.authtoken.models import Token
|
||||||
|
from rest_framework.permissions import IsAuthenticated
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework.views import APIView
|
from rest_framework.views import APIView
|
||||||
from rest_framework.authentication import TokenAuthentication
|
|
||||||
from rest_framework.permissions import IsAuthenticated
|
|
||||||
from rest_framework.authtoken.models import Token
|
|
||||||
|
|
||||||
from agents.models import Agent
|
|
||||||
from checks.models import Check
|
|
||||||
from autotasks.models import AutomatedTask
|
|
||||||
from accounts.models import User
|
from accounts.models import User
|
||||||
from winupdate.models import WinUpdatePolicy
|
from agents.models import Agent
|
||||||
from software.models import InstalledSoftware
|
|
||||||
from checks.serializers import CheckRunnerGetSerializerV3
|
|
||||||
from agents.serializers import WinAgentSerializer
|
from agents.serializers import WinAgentSerializer
|
||||||
|
from autotasks.models import AutomatedTask
|
||||||
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
|
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
|
||||||
from winupdate.serializers import ApprovedUpdateSerializer
|
from checks.models import Check
|
||||||
|
from checks.serializers import CheckRunnerGetSerializer
|
||||||
from agents.tasks import (
|
|
||||||
agent_recovery_email_task,
|
|
||||||
agent_recovery_sms_task,
|
|
||||||
sync_salt_modules_task,
|
|
||||||
install_salt_task,
|
|
||||||
)
|
|
||||||
from winupdate.tasks import check_for_updates_task
|
|
||||||
from software.tasks import install_chocolatey
|
|
||||||
from checks.utils import bytes2human
|
from checks.utils import bytes2human
|
||||||
from tacticalrmm.utils import notify_error, reload_nats, filter_software, SoftwareList
|
from logs.models import PendingAction
|
||||||
|
from software.models import InstalledSoftware
|
||||||
|
from tacticalrmm.utils import SoftwareList, filter_software, notify_error, reload_nats
|
||||||
|
from winupdate.models import WinUpdate, WinUpdatePolicy
|
||||||
|
|
||||||
logger.configure(**settings.LOG_CONFIG)
|
logger.configure(**settings.LOG_CONFIG)
|
||||||
|
|
||||||
|
|
||||||
class CheckIn(APIView):
|
class CheckIn(APIView):
|
||||||
"""
|
|
||||||
The agent's checkin endpoint
|
|
||||||
patch: called every 45 to 110 seconds, handles agent updates and recovery
|
|
||||||
put: called every 5 to 10 minutes, handles basic system info
|
|
||||||
post: called once on windows service startup
|
|
||||||
"""
|
|
||||||
|
|
||||||
authentication_classes = [TokenAuthentication]
|
authentication_classes = [TokenAuthentication]
|
||||||
permission_classes = [IsAuthenticated]
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
def patch(self, request):
|
def patch(self, request):
|
||||||
|
from alerts.models import Alert
|
||||||
|
|
||||||
|
updated = False
|
||||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
if pyver.parse(request.data["version"]) > pyver.parse(
|
||||||
|
agent.version
|
||||||
|
) or pyver.parse(request.data["version"]) == pyver.parse(
|
||||||
|
settings.LATEST_AGENT_VER
|
||||||
|
):
|
||||||
|
updated = True
|
||||||
agent.version = request.data["version"]
|
agent.version = request.data["version"]
|
||||||
agent.last_seen = djangotime.now()
|
agent.last_seen = djangotime.now()
|
||||||
agent.save(update_fields=["version", "last_seen"])
|
agent.save(update_fields=["version", "last_seen"])
|
||||||
|
|
||||||
if agent.agentoutages.exists() and agent.agentoutages.last().is_active:
|
# change agent update pending status to completed if agent has just updated
|
||||||
last_outage = agent.agentoutages.last()
|
if (
|
||||||
last_outage.recovery_time = djangotime.now()
|
updated
|
||||||
last_outage.save(update_fields=["recovery_time"])
|
and agent.pendingactions.filter( # type: ignore
|
||||||
|
action_type="agentupdate", status="pending"
|
||||||
|
).exists()
|
||||||
|
):
|
||||||
|
agent.pendingactions.filter( # type: ignore
|
||||||
|
action_type="agentupdate", status="pending"
|
||||||
|
).update(status="completed")
|
||||||
|
|
||||||
if agent.overdue_email_alert:
|
# handles any alerting actions
|
||||||
agent_recovery_email_task.delay(pk=last_outage.pk)
|
if Alert.objects.filter(agent=agent, resolved=False).exists():
|
||||||
if agent.overdue_text_alert:
|
Alert.handle_alert_resolve(agent)
|
||||||
agent_recovery_sms_task.delay(pk=last_outage.pk)
|
|
||||||
|
|
||||||
recovery = agent.recoveryactions.filter(last_run=None).last()
|
recovery = agent.recoveryactions.filter(last_run=None).last() # type: ignore
|
||||||
if recovery is not None:
|
if recovery is not None:
|
||||||
recovery.last_run = djangotime.now()
|
recovery.last_run = djangotime.now()
|
||||||
recovery.save(update_fields=["last_run"])
|
recovery.save(update_fields=["last_run"])
|
||||||
return Response(recovery.send())
|
handle_agent_recovery_task.delay(pk=recovery.pk) # type: ignore
|
||||||
|
return Response("ok")
|
||||||
# handle agent update
|
|
||||||
if agent.pendingactions.filter(
|
|
||||||
action_type="agentupdate", status="pending"
|
|
||||||
).exists():
|
|
||||||
update = agent.pendingactions.filter(
|
|
||||||
action_type="agentupdate", status="pending"
|
|
||||||
).last()
|
|
||||||
update.status = "completed"
|
|
||||||
update.save(update_fields=["status"])
|
|
||||||
return Response(update.details)
|
|
||||||
|
|
||||||
# get any pending actions
|
# get any pending actions
|
||||||
if agent.pendingactions.filter(status="pending").exists():
|
if agent.pendingactions.filter(status="pending").exists(): # type: ignore
|
||||||
agent.handle_pending_actions()
|
agent.handle_pending_actions()
|
||||||
|
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
@@ -93,84 +81,13 @@ class CheckIn(APIView):
|
|||||||
def put(self, request):
|
def put(self, request):
|
||||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||||
serializer.is_valid(raise_exception=True)
|
|
||||||
|
|
||||||
if "disks" in request.data.keys():
|
if request.data["func"] == "disks":
|
||||||
disks = request.data["disks"]
|
disks = request.data["disks"]
|
||||||
new = []
|
new = []
|
||||||
# python agent
|
|
||||||
if isinstance(disks, dict):
|
|
||||||
for k, v in disks.items():
|
|
||||||
new.append(v)
|
|
||||||
else:
|
|
||||||
# golang agent
|
|
||||||
for disk in disks:
|
|
||||||
tmp = {}
|
|
||||||
for k, v in disk.items():
|
|
||||||
tmp["device"] = disk["device"]
|
|
||||||
tmp["fstype"] = disk["fstype"]
|
|
||||||
tmp["total"] = bytes2human(disk["total"])
|
|
||||||
tmp["used"] = bytes2human(disk["used"])
|
|
||||||
tmp["free"] = bytes2human(disk["free"])
|
|
||||||
tmp["percent"] = int(disk["percent"])
|
|
||||||
new.append(tmp)
|
|
||||||
|
|
||||||
serializer.save(disks=new)
|
|
||||||
return Response("ok")
|
|
||||||
|
|
||||||
if "logged_in_username" in request.data.keys():
|
|
||||||
if request.data["logged_in_username"] != "None":
|
|
||||||
serializer.save(last_logged_in_user=request.data["logged_in_username"])
|
|
||||||
return Response("ok")
|
|
||||||
|
|
||||||
serializer.save()
|
|
||||||
return Response("ok")
|
|
||||||
|
|
||||||
def post(self, request):
|
|
||||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
|
||||||
|
|
||||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
|
||||||
serializer.is_valid(raise_exception=True)
|
|
||||||
serializer.save(last_seen=djangotime.now())
|
|
||||||
|
|
||||||
sync_salt_modules_task.delay(agent.pk)
|
|
||||||
check_for_updates_task.apply_async(
|
|
||||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": True}
|
|
||||||
)
|
|
||||||
|
|
||||||
if not agent.choco_installed:
|
|
||||||
install_chocolatey.delay(agent.pk, wait=True)
|
|
||||||
|
|
||||||
return Response("ok")
|
|
||||||
|
|
||||||
|
|
||||||
class Hello(APIView):
|
|
||||||
#### DEPRECATED, for agents <= 1.1.9 ####
|
|
||||||
"""
|
|
||||||
The agent's checkin endpoint
|
|
||||||
patch: called every 30 to 120 seconds
|
|
||||||
post: called on agent windows service startup
|
|
||||||
"""
|
|
||||||
|
|
||||||
authentication_classes = [TokenAuthentication]
|
|
||||||
permission_classes = [IsAuthenticated]
|
|
||||||
|
|
||||||
def patch(self, request):
|
|
||||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
|
||||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
|
||||||
serializer.is_valid(raise_exception=True)
|
|
||||||
|
|
||||||
disks = request.data["disks"]
|
|
||||||
new = []
|
|
||||||
# python agent
|
|
||||||
if isinstance(disks, dict):
|
|
||||||
for k, v in disks.items():
|
|
||||||
new.append(v)
|
|
||||||
else:
|
|
||||||
# golang agent
|
|
||||||
for disk in disks:
|
for disk in disks:
|
||||||
tmp = {}
|
tmp = {}
|
||||||
for k, v in disk.items():
|
for _, _ in disk.items():
|
||||||
tmp["device"] = disk["device"]
|
tmp["device"] = disk["device"]
|
||||||
tmp["fstype"] = disk["fstype"]
|
tmp["fstype"] = disk["fstype"]
|
||||||
tmp["total"] = bytes2human(disk["total"])
|
tmp["total"] = bytes2human(disk["total"])
|
||||||
@@ -179,71 +96,178 @@ class Hello(APIView):
|
|||||||
tmp["percent"] = int(disk["percent"])
|
tmp["percent"] = int(disk["percent"])
|
||||||
new.append(tmp)
|
new.append(tmp)
|
||||||
|
|
||||||
if request.data["logged_in_username"] == "None":
|
serializer.is_valid(raise_exception=True)
|
||||||
serializer.save(last_seen=djangotime.now(), disks=new)
|
serializer.save(disks=new)
|
||||||
else:
|
return Response("ok")
|
||||||
serializer.save(
|
|
||||||
last_seen=djangotime.now(),
|
if request.data["func"] == "loggedonuser":
|
||||||
disks=new,
|
if request.data["logged_in_username"] != "None":
|
||||||
last_logged_in_user=request.data["logged_in_username"],
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save(last_logged_in_user=request.data["logged_in_username"])
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
if request.data["func"] == "software":
|
||||||
|
raw: SoftwareList = request.data["software"]
|
||||||
|
if not isinstance(raw, list):
|
||||||
|
return notify_error("err")
|
||||||
|
|
||||||
|
sw = filter_software(raw)
|
||||||
|
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
||||||
|
InstalledSoftware(agent=agent, software=sw).save()
|
||||||
|
else:
|
||||||
|
s = agent.installedsoftware_set.first() # type: ignore
|
||||||
|
s.software = sw
|
||||||
|
s.save(update_fields=["software"])
|
||||||
|
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save()
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
# called once during tacticalagent windows service startup
|
||||||
|
def post(self, request):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
if not agent.choco_installed:
|
||||||
|
asyncio.run(agent.nats_cmd({"func": "installchoco"}, wait=False))
|
||||||
|
|
||||||
|
time.sleep(0.5)
|
||||||
|
asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False))
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
|
class SyncMeshNodeID(APIView):
|
||||||
|
authentication_classes = [TokenAuthentication]
|
||||||
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
if agent.mesh_node_id != request.data["nodeid"]:
|
||||||
|
agent.mesh_node_id = request.data["nodeid"]
|
||||||
|
agent.save(update_fields=["mesh_node_id"])
|
||||||
|
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
|
class Choco(APIView):
|
||||||
|
authentication_classes = [TokenAuthentication]
|
||||||
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
agent.choco_installed = request.data["installed"]
|
||||||
|
agent.save(update_fields=["choco_installed"])
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
|
class WinUpdates(APIView):
|
||||||
|
authentication_classes = [TokenAuthentication]
|
||||||
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
|
def put(self, request):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
reboot_policy: str = agent.get_patch_policy().reboot_after_install
|
||||||
|
reboot = False
|
||||||
|
|
||||||
|
if reboot_policy == "always":
|
||||||
|
reboot = True
|
||||||
|
|
||||||
|
if request.data["needs_reboot"]:
|
||||||
|
if reboot_policy == "required":
|
||||||
|
reboot = True
|
||||||
|
elif reboot_policy == "never":
|
||||||
|
agent.needs_reboot = True
|
||||||
|
agent.save(update_fields=["needs_reboot"])
|
||||||
|
|
||||||
|
if reboot:
|
||||||
|
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
|
||||||
|
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
||||||
|
|
||||||
|
agent.delete_superseded_updates()
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
def patch(self, request):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
u = agent.winupdates.filter(guid=request.data["guid"]).last() # type: ignore
|
||||||
|
success: bool = request.data["success"]
|
||||||
|
if success:
|
||||||
|
u.result = "success"
|
||||||
|
u.downloaded = True
|
||||||
|
u.installed = True
|
||||||
|
u.date_installed = djangotime.now()
|
||||||
|
u.save(
|
||||||
|
update_fields=[
|
||||||
|
"result",
|
||||||
|
"downloaded",
|
||||||
|
"installed",
|
||||||
|
"date_installed",
|
||||||
|
]
|
||||||
)
|
)
|
||||||
|
else:
|
||||||
|
u.result = "failed"
|
||||||
|
u.save(update_fields=["result"])
|
||||||
|
|
||||||
if agent.agentoutages.exists() and agent.agentoutages.last().is_active:
|
agent.delete_superseded_updates()
|
||||||
last_outage = agent.agentoutages.last()
|
|
||||||
last_outage.recovery_time = djangotime.now()
|
|
||||||
last_outage.save(update_fields=["recovery_time"])
|
|
||||||
|
|
||||||
if agent.overdue_email_alert:
|
|
||||||
agent_recovery_email_task.delay(pk=last_outage.pk)
|
|
||||||
if agent.overdue_text_alert:
|
|
||||||
agent_recovery_sms_task.delay(pk=last_outage.pk)
|
|
||||||
|
|
||||||
recovery = agent.recoveryactions.filter(last_run=None).last()
|
|
||||||
if recovery is not None:
|
|
||||||
recovery.last_run = djangotime.now()
|
|
||||||
recovery.save(update_fields=["last_run"])
|
|
||||||
return Response(recovery.send())
|
|
||||||
|
|
||||||
# handle agent update
|
|
||||||
if agent.pendingactions.filter(
|
|
||||||
action_type="agentupdate", status="pending"
|
|
||||||
).exists():
|
|
||||||
update = agent.pendingactions.filter(
|
|
||||||
action_type="agentupdate", status="pending"
|
|
||||||
).last()
|
|
||||||
update.status = "completed"
|
|
||||||
update.save(update_fields=["status"])
|
|
||||||
return Response(update.details)
|
|
||||||
|
|
||||||
# get any pending actions
|
|
||||||
if agent.pendingactions.filter(status="pending").exists():
|
|
||||||
agent.handle_pending_actions()
|
|
||||||
|
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
def post(self, request):
|
def post(self, request):
|
||||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
updates = request.data["wua_updates"]
|
||||||
|
for update in updates:
|
||||||
|
if agent.winupdates.filter(guid=update["guid"]).exists(): # type: ignore
|
||||||
|
u = agent.winupdates.filter(guid=update["guid"]).last() # type: ignore
|
||||||
|
u.downloaded = update["downloaded"]
|
||||||
|
u.installed = update["installed"]
|
||||||
|
u.save(update_fields=["downloaded", "installed"])
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
kb = "KB" + update["kb_article_ids"][0]
|
||||||
|
except:
|
||||||
|
continue
|
||||||
|
|
||||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
WinUpdate(
|
||||||
serializer.is_valid(raise_exception=True)
|
agent=agent,
|
||||||
serializer.save(last_seen=djangotime.now())
|
guid=update["guid"],
|
||||||
|
kb=kb,
|
||||||
|
title=update["title"],
|
||||||
|
installed=update["installed"],
|
||||||
|
downloaded=update["downloaded"],
|
||||||
|
description=update["description"],
|
||||||
|
severity=update["severity"],
|
||||||
|
categories=update["categories"],
|
||||||
|
category_ids=update["category_ids"],
|
||||||
|
kb_article_ids=update["kb_article_ids"],
|
||||||
|
more_info_urls=update["more_info_urls"],
|
||||||
|
support_url=update["support_url"],
|
||||||
|
revision_number=update["revision_number"],
|
||||||
|
).save()
|
||||||
|
|
||||||
sync_salt_modules_task.delay(agent.pk)
|
agent.delete_superseded_updates()
|
||||||
check_for_updates_task.apply_async(
|
|
||||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": True}
|
|
||||||
)
|
|
||||||
|
|
||||||
if not agent.choco_installed:
|
# more superseded updates cleanup
|
||||||
install_chocolatey.delay(agent.pk, wait=True)
|
if pyver.parse(agent.version) <= pyver.parse("1.4.2"):
|
||||||
|
for u in agent.winupdates.filter( # type: ignore
|
||||||
|
date_installed__isnull=True, result="failed"
|
||||||
|
).exclude(installed=True):
|
||||||
|
u.delete()
|
||||||
|
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
|
class SupersededWinUpdate(APIView):
|
||||||
|
authentication_classes = [TokenAuthentication]
|
||||||
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
updates = agent.winupdates.filter(guid=request.data["guid"]) # type: ignore
|
||||||
|
for u in updates:
|
||||||
|
u.delete()
|
||||||
|
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
class CheckRunner(APIView):
|
class CheckRunner(APIView):
|
||||||
"""
|
|
||||||
For the windows golang agent
|
|
||||||
"""
|
|
||||||
|
|
||||||
authentication_classes = [TokenAuthentication]
|
authentication_classes = [TokenAuthentication]
|
||||||
permission_classes = [IsAuthenticated]
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
@@ -254,13 +278,11 @@ class CheckRunner(APIView):
|
|||||||
ret = {
|
ret = {
|
||||||
"agent": agent.pk,
|
"agent": agent.pk,
|
||||||
"check_interval": agent.check_interval,
|
"check_interval": agent.check_interval,
|
||||||
"checks": CheckRunnerGetSerializerV3(checks, many=True).data,
|
"checks": CheckRunnerGetSerializer(checks, many=True).data,
|
||||||
}
|
}
|
||||||
return Response(ret)
|
return Response(ret)
|
||||||
|
|
||||||
def patch(self, request):
|
def patch(self, request):
|
||||||
from logs.models import AuditLog
|
|
||||||
|
|
||||||
check = get_object_or_404(Check, pk=request.data["id"])
|
check = get_object_or_404(Check, pk=request.data["id"])
|
||||||
check.last_run = djangotime.now()
|
check.last_run = djangotime.now()
|
||||||
check.save(update_fields=["last_run"])
|
check.save(update_fields=["last_run"])
|
||||||
@@ -269,11 +291,16 @@ class CheckRunner(APIView):
|
|||||||
return Response(status)
|
return Response(status)
|
||||||
|
|
||||||
|
|
||||||
class TaskRunner(APIView):
|
class CheckRunnerInterval(APIView):
|
||||||
"""
|
authentication_classes = [TokenAuthentication]
|
||||||
For the windows golang agent
|
permission_classes = [IsAuthenticated]
|
||||||
"""
|
|
||||||
|
|
||||||
|
def get(self, request, agentid):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||||
|
return Response({"agent": agent.pk, "check_interval": agent.check_interval})
|
||||||
|
|
||||||
|
|
||||||
|
class TaskRunner(APIView):
|
||||||
authentication_classes = [TokenAuthentication]
|
authentication_classes = [TokenAuthentication]
|
||||||
permission_classes = [IsAuthenticated]
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
@@ -283,6 +310,7 @@ class TaskRunner(APIView):
|
|||||||
return Response(TaskGOGetSerializer(task).data)
|
return Response(TaskGOGetSerializer(task).data)
|
||||||
|
|
||||||
def patch(self, request, pk, agentid):
|
def patch(self, request, pk, agentid):
|
||||||
|
from alerts.models import Alert
|
||||||
from logs.models import AuditLog
|
from logs.models import AuditLog
|
||||||
|
|
||||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||||
@@ -294,7 +322,18 @@ class TaskRunner(APIView):
|
|||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
serializer.save(last_run=djangotime.now())
|
serializer.save(last_run=djangotime.now())
|
||||||
|
|
||||||
new_task = AutomatedTask.objects.get(pk=task.pk)
|
status = "failing" if task.retcode != 0 else "passing"
|
||||||
|
|
||||||
|
new_task: AutomatedTask = AutomatedTask.objects.get(pk=task.pk)
|
||||||
|
new_task.status = status
|
||||||
|
new_task.save()
|
||||||
|
|
||||||
|
if status == "passing":
|
||||||
|
if Alert.objects.filter(assigned_task=new_task, resolved=False).exists():
|
||||||
|
Alert.handle_alert_resolve(new_task)
|
||||||
|
else:
|
||||||
|
Alert.handle_alert_failure(new_task)
|
||||||
|
|
||||||
AuditLog.objects.create(
|
AuditLog.objects.create(
|
||||||
username=agent.hostname,
|
username=agent.hostname,
|
||||||
agent=agent.hostname,
|
agent=agent.hostname,
|
||||||
@@ -307,154 +346,6 @@ class TaskRunner(APIView):
|
|||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
class SaltMinion(APIView):
|
|
||||||
authentication_classes = [TokenAuthentication]
|
|
||||||
permission_classes = [IsAuthenticated]
|
|
||||||
|
|
||||||
def get(self, request, agentid):
|
|
||||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
|
||||||
ret = {
|
|
||||||
"latestVer": settings.LATEST_SALT_VER,
|
|
||||||
"currentVer": agent.salt_ver,
|
|
||||||
"salt_id": agent.salt_id,
|
|
||||||
"downloadURL": agent.winsalt_dl,
|
|
||||||
}
|
|
||||||
return Response(ret)
|
|
||||||
|
|
||||||
def post(self, request):
|
|
||||||
# accept the salt key
|
|
||||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
|
||||||
if agent.salt_id != request.data["saltid"]:
|
|
||||||
return notify_error("Salt keys do not match")
|
|
||||||
|
|
||||||
try:
|
|
||||||
resp = requests.post(
|
|
||||||
f"http://{settings.SALT_HOST}:8123/run",
|
|
||||||
json=[
|
|
||||||
{
|
|
||||||
"client": "wheel",
|
|
||||||
"fun": "key.accept",
|
|
||||||
"match": request.data["saltid"],
|
|
||||||
"username": settings.SALT_USERNAME,
|
|
||||||
"password": settings.SALT_PASSWORD,
|
|
||||||
"eauth": "pam",
|
|
||||||
}
|
|
||||||
],
|
|
||||||
timeout=30,
|
|
||||||
)
|
|
||||||
except Exception:
|
|
||||||
return notify_error("No communication between agent and salt-api")
|
|
||||||
|
|
||||||
try:
|
|
||||||
data = resp.json()["return"][0]["data"]
|
|
||||||
minion = data["return"]["minions"][0]
|
|
||||||
except Exception:
|
|
||||||
return notify_error("Key error")
|
|
||||||
|
|
||||||
if data["success"] and minion == request.data["saltid"]:
|
|
||||||
return Response("Salt key was accepted")
|
|
||||||
else:
|
|
||||||
return notify_error("Not accepted")
|
|
||||||
|
|
||||||
def patch(self, request):
|
|
||||||
# sync modules
|
|
||||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
|
||||||
r = agent.salt_api_cmd(timeout=45, func="saltutil.sync_modules")
|
|
||||||
|
|
||||||
if r == "timeout" or r == "error":
|
|
||||||
return notify_error("Failed to sync salt modules")
|
|
||||||
|
|
||||||
if isinstance(r, list) and any("modules" in i for i in r):
|
|
||||||
return Response("Successfully synced salt modules")
|
|
||||||
elif isinstance(r, list) and not r:
|
|
||||||
return Response("Modules are already in sync")
|
|
||||||
else:
|
|
||||||
return notify_error(f"Failed to sync salt modules: {str(r)}")
|
|
||||||
|
|
||||||
def put(self, request):
|
|
||||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
|
||||||
agent.salt_ver = request.data["ver"]
|
|
||||||
agent.save(update_fields=["salt_ver"])
|
|
||||||
return Response("ok")
|
|
||||||
|
|
||||||
|
|
||||||
class WinUpdater(APIView):
|
|
||||||
|
|
||||||
authentication_classes = [TokenAuthentication]
|
|
||||||
permission_classes = [IsAuthenticated]
|
|
||||||
|
|
||||||
def get(self, request, agentid):
|
|
||||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
|
||||||
agent.delete_superseded_updates()
|
|
||||||
patches = agent.winupdates.filter(action="approve").exclude(installed=True)
|
|
||||||
return Response(ApprovedUpdateSerializer(patches, many=True).data)
|
|
||||||
|
|
||||||
# agent sends patch results as it's installing them
|
|
||||||
def patch(self, request):
|
|
||||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
|
||||||
kb = request.data["kb"]
|
|
||||||
results = request.data["results"]
|
|
||||||
update = agent.winupdates.get(kb=kb)
|
|
||||||
|
|
||||||
if results == "error" or results == "failed":
|
|
||||||
update.result = results
|
|
||||||
update.save(update_fields=["result"])
|
|
||||||
elif results == "success":
|
|
||||||
update.result = "success"
|
|
||||||
update.downloaded = True
|
|
||||||
update.installed = True
|
|
||||||
update.date_installed = djangotime.now()
|
|
||||||
update.save(
|
|
||||||
update_fields=[
|
|
||||||
"result",
|
|
||||||
"downloaded",
|
|
||||||
"installed",
|
|
||||||
"date_installed",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
elif results == "alreadyinstalled":
|
|
||||||
update.result = "success"
|
|
||||||
update.downloaded = True
|
|
||||||
update.installed = True
|
|
||||||
update.save(update_fields=["result", "downloaded", "installed"])
|
|
||||||
|
|
||||||
return Response("ok")
|
|
||||||
|
|
||||||
# agent calls this after it's finished installing all patches
|
|
||||||
def post(self, request):
|
|
||||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
|
||||||
reboot_policy = agent.get_patch_policy().reboot_after_install
|
|
||||||
reboot = False
|
|
||||||
|
|
||||||
if reboot_policy == "always":
|
|
||||||
reboot = True
|
|
||||||
|
|
||||||
if request.data["reboot"]:
|
|
||||||
if reboot_policy == "required":
|
|
||||||
reboot = True
|
|
||||||
elif reboot_policy == "never":
|
|
||||||
agent.needs_reboot = True
|
|
||||||
agent.save(update_fields=["needs_reboot"])
|
|
||||||
|
|
||||||
if reboot:
|
|
||||||
if agent.has_nats:
|
|
||||||
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
|
|
||||||
else:
|
|
||||||
agent.salt_api_async(
|
|
||||||
func="system.reboot",
|
|
||||||
arg=7,
|
|
||||||
kwargs={"in_seconds": True},
|
|
||||||
)
|
|
||||||
|
|
||||||
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
|
||||||
else:
|
|
||||||
check_for_updates_task.apply_async(
|
|
||||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
|
||||||
)
|
|
||||||
|
|
||||||
return Response("ok")
|
|
||||||
|
|
||||||
|
|
||||||
class SysInfo(APIView):
|
class SysInfo(APIView):
|
||||||
authentication_classes = [TokenAuthentication]
|
authentication_classes = [TokenAuthentication]
|
||||||
permission_classes = [IsAuthenticated]
|
permission_classes = [IsAuthenticated]
|
||||||
@@ -470,29 +361,6 @@ class SysInfo(APIView):
|
|||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
class MeshInfo(APIView):
|
|
||||||
authentication_classes = [TokenAuthentication]
|
|
||||||
permission_classes = [IsAuthenticated]
|
|
||||||
|
|
||||||
def get(self, request, pk):
|
|
||||||
agent = get_object_or_404(Agent, pk=pk)
|
|
||||||
return Response(agent.mesh_node_id)
|
|
||||||
|
|
||||||
def patch(self, request, pk):
|
|
||||||
agent = get_object_or_404(Agent, pk=pk)
|
|
||||||
|
|
||||||
if "nodeidhex" in request.data:
|
|
||||||
# agent <= 1.1.0
|
|
||||||
nodeid = request.data["nodeidhex"]
|
|
||||||
else:
|
|
||||||
# agent >= 1.1.1
|
|
||||||
nodeid = request.data["nodeid"]
|
|
||||||
|
|
||||||
agent.mesh_node_id = nodeid
|
|
||||||
agent.save(update_fields=["mesh_node_id"])
|
|
||||||
return Response("ok")
|
|
||||||
|
|
||||||
|
|
||||||
class MeshExe(APIView):
|
class MeshExe(APIView):
|
||||||
""" Sends the mesh exe to the installer """
|
""" Sends the mesh exe to the installer """
|
||||||
|
|
||||||
@@ -542,10 +410,10 @@ class NewAgent(APIView):
|
|||||||
agent.salt_id = f"{agent.hostname}-{agent.pk}"
|
agent.salt_id = f"{agent.hostname}-{agent.pk}"
|
||||||
agent.save(update_fields=["salt_id"])
|
agent.save(update_fields=["salt_id"])
|
||||||
|
|
||||||
user = User.objects.create_user(
|
user = User.objects.create_user( # type: ignore
|
||||||
username=request.data["agent_id"],
|
username=request.data["agent_id"],
|
||||||
agent=agent,
|
agent=agent,
|
||||||
password=User.objects.make_random_password(60),
|
password=User.objects.make_random_password(60), # type: ignore
|
||||||
)
|
)
|
||||||
|
|
||||||
token = Token.objects.create(user=user)
|
token = Token.objects.create(user=user)
|
||||||
@@ -557,10 +425,6 @@ class NewAgent(APIView):
|
|||||||
|
|
||||||
reload_nats()
|
reload_nats()
|
||||||
|
|
||||||
# Generate policies for new agent
|
|
||||||
agent.generate_checks_from_policies()
|
|
||||||
agent.generate_tasks_from_policies()
|
|
||||||
|
|
||||||
# create agent install audit record
|
# create agent install audit record
|
||||||
AuditLog.objects.create(
|
AuditLog.objects.create(
|
||||||
username=request.user,
|
username=request.user,
|
||||||
@@ -594,7 +458,7 @@ class Software(APIView):
|
|||||||
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
||||||
InstalledSoftware(agent=agent, software=sw).save()
|
InstalledSoftware(agent=agent, software=sw).save()
|
||||||
else:
|
else:
|
||||||
s = agent.installedsoftware_set.first()
|
s = agent.installedsoftware_set.first() # type: ignore
|
||||||
s.software = sw
|
s.software = sw
|
||||||
s.save(update_fields=["software"])
|
s.save(update_fields=["software"])
|
||||||
|
|
||||||
@@ -619,11 +483,33 @@ class Installer(APIView):
|
|||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
class InstallSalt(APIView):
|
class ChocoResult(APIView):
|
||||||
authentication_classes = [TokenAuthentication]
|
authentication_classes = [TokenAuthentication]
|
||||||
permission_classes = [IsAuthenticated]
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
def get(self, request, agentid):
|
def patch(self, request, pk):
|
||||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
action = get_object_or_404(PendingAction, pk=pk)
|
||||||
install_salt_task.delay(agent.pk)
|
results: str = request.data["results"]
|
||||||
|
|
||||||
|
software_name = action.details["name"].lower()
|
||||||
|
success = [
|
||||||
|
"install",
|
||||||
|
"of",
|
||||||
|
software_name,
|
||||||
|
"was",
|
||||||
|
"successful",
|
||||||
|
"installed",
|
||||||
|
]
|
||||||
|
duplicate = [software_name, "already", "installed", "--force", "reinstall"]
|
||||||
|
installed = False
|
||||||
|
|
||||||
|
if all(x in results.lower() for x in success):
|
||||||
|
installed = True
|
||||||
|
elif all(x in results.lower() for x in duplicate):
|
||||||
|
installed = True
|
||||||
|
|
||||||
|
action.details["output"] = results
|
||||||
|
action.details["installed"] = installed
|
||||||
|
action.status = "completed"
|
||||||
|
action.save(update_fields=["details", "status"])
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# Generated by Django 3.0.6 on 2020-06-04 17:13
|
# Generated by Django 3.0.6 on 2020-06-04 17:13
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|||||||
@@ -0,0 +1,20 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-02-12 14:08
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('alerts', '0004_auto_20210212_1408'),
|
||||||
|
('automation', '0006_delete_policyexclusions'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='policy',
|
||||||
|
name='alert_template',
|
||||||
|
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='policies', to='alerts.alerttemplate'),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
from django.db import models
|
from django.db import models
|
||||||
|
|
||||||
from agents.models import Agent
|
from agents.models import Agent
|
||||||
from clients.models import Site, Client
|
|
||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
from logs.models import BaseAuditModel
|
from logs.models import BaseAuditModel
|
||||||
|
|
||||||
@@ -10,14 +10,44 @@ class Policy(BaseAuditModel):
|
|||||||
desc = models.CharField(max_length=255, null=True, blank=True)
|
desc = models.CharField(max_length=255, null=True, blank=True)
|
||||||
active = models.BooleanField(default=False)
|
active = models.BooleanField(default=False)
|
||||||
enforced = models.BooleanField(default=False)
|
enforced = models.BooleanField(default=False)
|
||||||
|
alert_template = models.ForeignKey(
|
||||||
|
"alerts.AlertTemplate",
|
||||||
|
related_name="policies",
|
||||||
|
on_delete=models.SET_NULL,
|
||||||
|
null=True,
|
||||||
|
blank=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
def save(self, *args, **kwargs):
|
||||||
|
from automation.tasks import generate_agent_checks_from_policies_task
|
||||||
|
|
||||||
|
# get old policy if exists
|
||||||
|
old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||||
|
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||||
|
|
||||||
|
# generate agent checks only if active and enforced were changed
|
||||||
|
if old_policy:
|
||||||
|
if old_policy.active != self.active or old_policy.enforced != self.enforced:
|
||||||
|
generate_agent_checks_from_policies_task.delay(
|
||||||
|
policypk=self.pk,
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
def delete(self, *args, **kwargs):
|
||||||
|
from automation.tasks import generate_agent_checks_task
|
||||||
|
|
||||||
|
agents = list(self.related_agents().only("pk").values_list("pk", flat=True))
|
||||||
|
super(BaseAuditModel, self).delete(*args, **kwargs)
|
||||||
|
|
||||||
|
generate_agent_checks_task.delay(agents, create_tasks=True)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_default_server_policy(self):
|
def is_default_server_policy(self):
|
||||||
return self.default_server_policy.exists()
|
return self.default_server_policy.exists() # type: ignore
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_default_workstation_policy(self):
|
def is_default_workstation_policy(self):
|
||||||
return self.default_workstation_policy.exists()
|
return self.default_workstation_policy.exists() # type: ignore
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.name
|
return self.name
|
||||||
@@ -26,7 +56,7 @@ class Policy(BaseAuditModel):
|
|||||||
return self.get_related("server") | self.get_related("workstation")
|
return self.get_related("server") | self.get_related("workstation")
|
||||||
|
|
||||||
def get_related(self, mon_type):
|
def get_related(self, mon_type):
|
||||||
explicit_agents = self.agents.filter(monitoring_type=mon_type)
|
explicit_agents = self.agents.filter(monitoring_type=mon_type) # type: ignore
|
||||||
explicit_clients = getattr(self, f"{mon_type}_clients").all()
|
explicit_clients = getattr(self, f"{mon_type}_clients").all()
|
||||||
explicit_sites = getattr(self, f"{mon_type}_sites").all()
|
explicit_sites = getattr(self, f"{mon_type}_sites").all()
|
||||||
|
|
||||||
@@ -58,6 +88,10 @@ class Policy(BaseAuditModel):
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def cascade_policy_tasks(agent):
|
def cascade_policy_tasks(agent):
|
||||||
|
from autotasks.models import AutomatedTask
|
||||||
|
from autotasks.tasks import delete_win_task_schedule
|
||||||
|
from logs.models import PendingAction
|
||||||
|
|
||||||
# List of all tasks to be applied
|
# List of all tasks to be applied
|
||||||
tasks = list()
|
tasks = list()
|
||||||
added_task_pks = list()
|
added_task_pks = list()
|
||||||
@@ -107,6 +141,35 @@ class Policy(BaseAuditModel):
|
|||||||
tasks.append(task)
|
tasks.append(task)
|
||||||
added_task_pks.append(task.pk)
|
added_task_pks.append(task.pk)
|
||||||
|
|
||||||
|
# remove policy tasks from agent not included in policy
|
||||||
|
for task in agent.autotasks.filter(
|
||||||
|
parent_task__in=[
|
||||||
|
taskpk
|
||||||
|
for taskpk in agent_tasks_parent_pks
|
||||||
|
if taskpk not in added_task_pks
|
||||||
|
]
|
||||||
|
):
|
||||||
|
delete_win_task_schedule.delay(task.pk)
|
||||||
|
|
||||||
|
# handle matching tasks that haven't synced to agent yet or pending deletion due to agent being offline
|
||||||
|
for action in agent.pendingactions.filter(action_type="taskaction").exclude(
|
||||||
|
status="completed"
|
||||||
|
):
|
||||||
|
task = AutomatedTask.objects.get(pk=action.details["task_id"])
|
||||||
|
if (
|
||||||
|
task.parent_task in agent_tasks_parent_pks
|
||||||
|
and task.parent_task in added_task_pks
|
||||||
|
):
|
||||||
|
agent.remove_matching_pending_task_actions(task.id)
|
||||||
|
|
||||||
|
PendingAction(
|
||||||
|
agent=agent,
|
||||||
|
action_type="taskaction",
|
||||||
|
details={"action": "taskcreate", "task_id": task.id},
|
||||||
|
).save()
|
||||||
|
task.sync_status = "notsynced"
|
||||||
|
task.save(update_fields=["sync_status"])
|
||||||
|
|
||||||
return [task for task in tasks if task.pk not in agent_tasks_parent_pks]
|
return [task for task in tasks if task.pk not in agent_tasks_parent_pks]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -280,6 +343,15 @@ class Policy(BaseAuditModel):
|
|||||||
+ eventlog_checks
|
+ eventlog_checks
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# remove policy checks from agent that fell out of policy scope
|
||||||
|
agent.agentchecks.filter(
|
||||||
|
parent_check__in=[
|
||||||
|
checkpk
|
||||||
|
for checkpk in agent_checks_parent_pks
|
||||||
|
if checkpk not in [check.pk for check in final_list]
|
||||||
|
]
|
||||||
|
).delete()
|
||||||
|
|
||||||
return [
|
return [
|
||||||
check for check in final_list if check.pk not in agent_checks_parent_pks
|
check for check in final_list if check.pk not in agent_checks_parent_pks
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,20 +1,16 @@
|
|||||||
from rest_framework.serializers import (
|
from rest_framework.serializers import (
|
||||||
ModelSerializer,
|
ModelSerializer,
|
||||||
SerializerMethodField,
|
|
||||||
StringRelatedField,
|
|
||||||
ReadOnlyField,
|
ReadOnlyField,
|
||||||
|
SerializerMethodField,
|
||||||
)
|
)
|
||||||
|
|
||||||
from clients.serializers import ClientSerializer, SiteSerializer
|
|
||||||
from agents.serializers import AgentHostnameSerializer
|
|
||||||
|
|
||||||
from .models import Policy
|
|
||||||
from agents.models import Agent
|
|
||||||
from autotasks.models import AutomatedTask
|
from autotasks.models import AutomatedTask
|
||||||
from checks.models import Check
|
from checks.models import Check
|
||||||
from clients.models import Client, Site
|
from clients.models import Client
|
||||||
from winupdate.serializers import WinUpdatePolicySerializer
|
from winupdate.serializers import WinUpdatePolicySerializer
|
||||||
|
|
||||||
|
from .models import Policy
|
||||||
|
|
||||||
|
|
||||||
class PolicySerializer(ModelSerializer):
|
class PolicySerializer(ModelSerializer):
|
||||||
class Meta:
|
class Meta:
|
||||||
@@ -24,15 +20,11 @@ class PolicySerializer(ModelSerializer):
|
|||||||
|
|
||||||
class PolicyTableSerializer(ModelSerializer):
|
class PolicyTableSerializer(ModelSerializer):
|
||||||
|
|
||||||
server_clients = ClientSerializer(many=True, read_only=True)
|
|
||||||
server_sites = SiteSerializer(many=True, read_only=True)
|
|
||||||
workstation_clients = ClientSerializer(many=True, read_only=True)
|
|
||||||
workstation_sites = SiteSerializer(many=True, read_only=True)
|
|
||||||
agents = AgentHostnameSerializer(many=True, read_only=True)
|
|
||||||
default_server_policy = ReadOnlyField(source="is_default_server_policy")
|
default_server_policy = ReadOnlyField(source="is_default_server_policy")
|
||||||
default_workstation_policy = ReadOnlyField(source="is_default_workstation_policy")
|
default_workstation_policy = ReadOnlyField(source="is_default_workstation_policy")
|
||||||
agents_count = SerializerMethodField(read_only=True)
|
agents_count = SerializerMethodField(read_only=True)
|
||||||
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
|
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
|
||||||
|
alert_template = ReadOnlyField(source="alert_template.id")
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Policy
|
model = Policy
|
||||||
@@ -78,49 +70,16 @@ class PolicyCheckSerializer(ModelSerializer):
|
|||||||
"assignedtask",
|
"assignedtask",
|
||||||
"text_alert",
|
"text_alert",
|
||||||
"email_alert",
|
"email_alert",
|
||||||
|
"dashboard_alert",
|
||||||
)
|
)
|
||||||
depth = 1
|
depth = 1
|
||||||
|
|
||||||
|
|
||||||
class AutoTasksFieldSerializer(ModelSerializer):
|
class AutoTasksFieldSerializer(ModelSerializer):
|
||||||
assigned_check = PolicyCheckSerializer(read_only=True)
|
assigned_check = PolicyCheckSerializer(read_only=True)
|
||||||
|
script = ReadOnlyField(source="script.id")
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = AutomatedTask
|
model = AutomatedTask
|
||||||
fields = ("id", "enabled", "name", "schedule", "assigned_check")
|
fields = "__all__"
|
||||||
depth = 1
|
|
||||||
|
|
||||||
|
|
||||||
class AutoTaskPolicySerializer(ModelSerializer):
|
|
||||||
|
|
||||||
autotasks = AutoTasksFieldSerializer(many=True, read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Policy
|
|
||||||
fields = (
|
|
||||||
"id",
|
|
||||||
"name",
|
|
||||||
"autotasks",
|
|
||||||
)
|
|
||||||
depth = 2
|
|
||||||
|
|
||||||
|
|
||||||
class RelatedClientPolicySerializer(ModelSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = Client
|
|
||||||
fields = ("workstation_policy", "server_policy")
|
|
||||||
depth = 1
|
|
||||||
|
|
||||||
|
|
||||||
class RelatedSitePolicySerializer(ModelSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = Site
|
|
||||||
fields = ("workstation_policy", "server_policy")
|
|
||||||
depth = 1
|
|
||||||
|
|
||||||
|
|
||||||
class RelatedAgentPolicySerializer(ModelSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = Agent
|
|
||||||
fields = ("policy",)
|
|
||||||
depth = 1
|
depth = 1
|
||||||
|
|||||||
@@ -1,80 +1,91 @@
|
|||||||
from automation.models import Policy
|
|
||||||
from checks.models import Check
|
|
||||||
from agents.models import Agent
|
from agents.models import Agent
|
||||||
|
from automation.models import Policy
|
||||||
|
from autotasks.models import AutomatedTask
|
||||||
|
from checks.models import Check
|
||||||
from tacticalrmm.celery import app
|
from tacticalrmm.celery import app
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def generate_agent_checks_from_policies_task(
|
# generates policy checks on agents affected by a policy and optionally generate automated tasks
|
||||||
###
|
def generate_agent_checks_from_policies_task(policypk, create_tasks=False):
|
||||||
# copies the policy checks to all affected agents
|
|
||||||
#
|
|
||||||
# clear: clears all policy checks first
|
|
||||||
# create_tasks: also create tasks after checks are generated
|
|
||||||
###
|
|
||||||
policypk,
|
|
||||||
clear=False,
|
|
||||||
create_tasks=False,
|
|
||||||
):
|
|
||||||
|
|
||||||
policy = Policy.objects.get(pk=policypk)
|
policy = Policy.objects.get(pk=policypk)
|
||||||
|
|
||||||
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
||||||
agents = Agent.objects.all()
|
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
|
||||||
elif policy.is_default_server_policy:
|
elif policy.is_default_server_policy:
|
||||||
agents = Agent.objects.filter(monitoring_type="server")
|
agents = Agent.objects.filter(monitoring_type="server").only(
|
||||||
|
"pk", "monitoring_type"
|
||||||
|
)
|
||||||
elif policy.is_default_workstation_policy:
|
elif policy.is_default_workstation_policy:
|
||||||
agents = Agent.objects.filter(monitoring_type="workstation")
|
agents = Agent.objects.filter(monitoring_type="workstation").only(
|
||||||
|
"pk", "monitoring_type"
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
agents = policy.related_agents()
|
agents = policy.related_agents().only("pk")
|
||||||
|
|
||||||
for agent in agents:
|
for agent in agents:
|
||||||
agent.generate_checks_from_policies(clear=clear)
|
agent.generate_checks_from_policies()
|
||||||
if create_tasks:
|
if create_tasks:
|
||||||
agent.generate_tasks_from_policies(
|
agent.generate_tasks_from_policies()
|
||||||
clear=clear,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def generate_agent_checks_by_location_task(
|
# generates policy checks on a list of agents and optionally generate automated tasks
|
||||||
location, mon_type, clear=False, create_tasks=False
|
def generate_agent_checks_task(agentpks, create_tasks=False):
|
||||||
):
|
for agent in Agent.objects.filter(pk__in=agentpks):
|
||||||
|
agent.generate_checks_from_policies()
|
||||||
|
|
||||||
|
if create_tasks:
|
||||||
|
agent.generate_tasks_from_policies()
|
||||||
|
|
||||||
|
|
||||||
|
@app.task
|
||||||
|
# generates policy checks on agent servers or workstations within a certain client or site and optionally generate automated tasks
|
||||||
|
def generate_agent_checks_by_location_task(location, mon_type, create_tasks=False):
|
||||||
|
|
||||||
for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
|
for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
|
||||||
agent.generate_checks_from_policies(clear=clear)
|
agent.generate_checks_from_policies()
|
||||||
|
|
||||||
if create_tasks:
|
if create_tasks:
|
||||||
agent.generate_tasks_from_policies(clear=clear)
|
agent.generate_tasks_from_policies()
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def generate_all_agent_checks_task(mon_type, clear=False, create_tasks=False):
|
# generates policy checks on all agent servers or workstations and optionally generate automated tasks
|
||||||
|
def generate_all_agent_checks_task(mon_type, create_tasks=False):
|
||||||
for agent in Agent.objects.filter(monitoring_type=mon_type):
|
for agent in Agent.objects.filter(monitoring_type=mon_type):
|
||||||
agent.generate_checks_from_policies(clear=clear)
|
agent.generate_checks_from_policies()
|
||||||
|
|
||||||
if create_tasks:
|
if create_tasks:
|
||||||
agent.generate_tasks_from_policies(clear=clear)
|
agent.generate_tasks_from_policies()
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
|
# deletes a policy managed check from all agents
|
||||||
def delete_policy_check_task(checkpk):
|
def delete_policy_check_task(checkpk):
|
||||||
|
|
||||||
Check.objects.filter(parent_check=checkpk).delete()
|
Check.objects.filter(parent_check=checkpk).delete()
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
|
# updates policy managed check fields on agents
|
||||||
def update_policy_check_fields_task(checkpk):
|
def update_policy_check_fields_task(checkpk):
|
||||||
|
|
||||||
check = Check.objects.get(pk=checkpk)
|
check = Check.objects.get(pk=checkpk)
|
||||||
|
|
||||||
Check.objects.filter(parent_check=checkpk).update(
|
Check.objects.filter(parent_check=checkpk).update(
|
||||||
threshold=check.threshold,
|
warning_threshold=check.warning_threshold,
|
||||||
|
error_threshold=check.error_threshold,
|
||||||
|
alert_severity=check.alert_severity,
|
||||||
name=check.name,
|
name=check.name,
|
||||||
|
disk=check.disk,
|
||||||
fails_b4_alert=check.fails_b4_alert,
|
fails_b4_alert=check.fails_b4_alert,
|
||||||
ip=check.ip,
|
ip=check.ip,
|
||||||
|
script=check.script,
|
||||||
script_args=check.script_args,
|
script_args=check.script_args,
|
||||||
|
info_return_codes=check.info_return_codes,
|
||||||
|
warning_return_codes=check.warning_return_codes,
|
||||||
timeout=check.timeout,
|
timeout=check.timeout,
|
||||||
pass_if_start_pending=check.pass_if_start_pending,
|
pass_if_start_pending=check.pass_if_start_pending,
|
||||||
pass_if_svc_not_exist=check.pass_if_svc_not_exist,
|
pass_if_svc_not_exist=check.pass_if_svc_not_exist,
|
||||||
@@ -89,38 +100,37 @@ def update_policy_check_fields_task(checkpk):
|
|||||||
search_last_days=check.search_last_days,
|
search_last_days=check.search_last_days,
|
||||||
email_alert=check.email_alert,
|
email_alert=check.email_alert,
|
||||||
text_alert=check.text_alert,
|
text_alert=check.text_alert,
|
||||||
|
dashboard_alert=check.dashboard_alert,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def generate_agent_tasks_from_policies_task(policypk, clear=False):
|
# generates policy tasks on agents affected by a policy
|
||||||
|
def generate_agent_tasks_from_policies_task(policypk):
|
||||||
|
|
||||||
policy = Policy.objects.get(pk=policypk)
|
policy = Policy.objects.get(pk=policypk)
|
||||||
|
|
||||||
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
||||||
agents = Agent.objects.all()
|
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
|
||||||
elif policy.is_default_server_policy:
|
elif policy.is_default_server_policy:
|
||||||
agents = Agent.objects.filter(monitoring_type="server")
|
agents = Agent.objects.filter(monitoring_type="server").only(
|
||||||
|
"pk", "monitoring_type"
|
||||||
|
)
|
||||||
elif policy.is_default_workstation_policy:
|
elif policy.is_default_workstation_policy:
|
||||||
agents = Agent.objects.filter(monitoring_type="workstation")
|
agents = Agent.objects.filter(monitoring_type="workstation").only(
|
||||||
|
"pk", "monitoring_type"
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
agents = policy.related_agents()
|
agents = policy.related_agents().only("pk")
|
||||||
|
|
||||||
for agent in agents:
|
for agent in agents:
|
||||||
agent.generate_tasks_from_policies(clear=clear)
|
agent.generate_tasks_from_policies()
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
|
||||||
def generate_agent_tasks_by_location_task(location, mon_type, clear=False):
|
|
||||||
|
|
||||||
for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
|
|
||||||
agent.generate_tasks_from_policies(clear=clear)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def delete_policy_autotask_task(taskpk):
|
def delete_policy_autotask_task(taskpk):
|
||||||
from autotasks.tasks import delete_win_task_schedule
|
|
||||||
from autotasks.models import AutomatedTask
|
from autotasks.models import AutomatedTask
|
||||||
|
from autotasks.tasks import delete_win_task_schedule
|
||||||
|
|
||||||
for task in AutomatedTask.objects.filter(parent_task=taskpk):
|
for task in AutomatedTask.objects.filter(parent_task=taskpk):
|
||||||
delete_win_task_schedule.delay(task.pk)
|
delete_win_task_schedule.delay(task.pk)
|
||||||
@@ -135,13 +145,23 @@ def run_win_policy_autotask_task(task_pks):
|
|||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def update_policy_task_fields_task(taskpk, enabled):
|
def update_policy_task_fields_task(taskpk, update_agent=False):
|
||||||
from autotasks.models import AutomatedTask
|
|
||||||
from autotasks.tasks import enable_or_disable_win_task
|
from autotasks.tasks import enable_or_disable_win_task
|
||||||
|
|
||||||
tasks = AutomatedTask.objects.filter(parent_task=taskpk)
|
task = AutomatedTask.objects.get(pk=taskpk)
|
||||||
|
|
||||||
tasks.update(enabled=enabled)
|
AutomatedTask.objects.filter(parent_task=taskpk).update(
|
||||||
|
alert_severity=task.alert_severity,
|
||||||
|
email_alert=task.email_alert,
|
||||||
|
text_alert=task.text_alert,
|
||||||
|
dashboard_alert=task.dashboard_alert,
|
||||||
|
script=task.script,
|
||||||
|
script_args=task.script_args,
|
||||||
|
name=task.name,
|
||||||
|
timeout=task.timeout,
|
||||||
|
enabled=task.enabled,
|
||||||
|
)
|
||||||
|
|
||||||
for autotask in tasks:
|
if update_agent:
|
||||||
enable_or_disable_win_task(autotask.pk, enabled)
|
for task in AutomatedTask.objects.filter(parent_task=taskpk):
|
||||||
|
enable_or_disable_win_task.delay(task.pk, task.enabled)
|
||||||
|
|||||||
@@ -1,21 +1,20 @@
|
|||||||
from unittest.mock import patch
|
|
||||||
from tacticalrmm.test import TacticalTestCase
|
|
||||||
from model_bakery import baker, seq
|
|
||||||
from itertools import cycle
|
from itertools import cycle
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
from model_bakery import baker, seq
|
||||||
|
|
||||||
from agents.models import Agent
|
from agents.models import Agent
|
||||||
|
from tacticalrmm.test import TacticalTestCase
|
||||||
from winupdate.models import WinUpdatePolicy
|
from winupdate.models import WinUpdatePolicy
|
||||||
|
|
||||||
from .serializers import (
|
from .serializers import (
|
||||||
PolicyTableSerializer,
|
AutoTasksFieldSerializer,
|
||||||
PolicySerializer,
|
|
||||||
PolicyTaskStatusSerializer,
|
|
||||||
AutoTaskPolicySerializer,
|
|
||||||
PolicyOverviewSerializer,
|
|
||||||
PolicyCheckStatusSerializer,
|
|
||||||
PolicyCheckSerializer,
|
PolicyCheckSerializer,
|
||||||
RelatedAgentPolicySerializer,
|
PolicyCheckStatusSerializer,
|
||||||
RelatedSitePolicySerializer,
|
PolicyOverviewSerializer,
|
||||||
RelatedClientPolicySerializer,
|
PolicySerializer,
|
||||||
|
PolicyTableSerializer,
|
||||||
|
PolicyTaskStatusSerializer,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -91,7 +90,7 @@ class TestPolicyViews(TacticalTestCase):
|
|||||||
self.check_not_authenticated("post", url)
|
self.check_not_authenticated("post", url)
|
||||||
|
|
||||||
@patch("automation.tasks.generate_agent_checks_from_policies_task.delay")
|
@patch("automation.tasks.generate_agent_checks_from_policies_task.delay")
|
||||||
def test_update_policy(self, mock_checks_task):
|
def test_update_policy(self, generate_agent_checks_from_policies_task):
|
||||||
# returns 404 for invalid policy pk
|
# returns 404 for invalid policy pk
|
||||||
resp = self.client.put("/automation/policies/500/", format="json")
|
resp = self.client.put("/automation/policies/500/", format="json")
|
||||||
self.assertEqual(resp.status_code, 404)
|
self.assertEqual(resp.status_code, 404)
|
||||||
@@ -110,7 +109,7 @@ class TestPolicyViews(TacticalTestCase):
|
|||||||
self.assertEqual(resp.status_code, 200)
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
# only called if active or enforced are updated
|
# only called if active or enforced are updated
|
||||||
mock_checks_task.assert_not_called()
|
generate_agent_checks_from_policies_task.assert_not_called()
|
||||||
|
|
||||||
data = {
|
data = {
|
||||||
"name": "Test Policy Update",
|
"name": "Test Policy Update",
|
||||||
@@ -121,42 +120,43 @@ class TestPolicyViews(TacticalTestCase):
|
|||||||
|
|
||||||
resp = self.client.put(url, data, format="json")
|
resp = self.client.put(url, data, format="json")
|
||||||
self.assertEqual(resp.status_code, 200)
|
self.assertEqual(resp.status_code, 200)
|
||||||
mock_checks_task.assert_called_with(
|
generate_agent_checks_from_policies_task.assert_called_with(
|
||||||
policypk=policy.pk, clear=True, create_tasks=True
|
policypk=policy.pk, create_tasks=True
|
||||||
)
|
)
|
||||||
|
|
||||||
self.check_not_authenticated("put", url)
|
self.check_not_authenticated("put", url)
|
||||||
|
|
||||||
@patch("automation.tasks.generate_agent_checks_from_policies_task.delay")
|
@patch("automation.tasks.generate_agent_checks_task.delay")
|
||||||
@patch("automation.tasks.generate_agent_tasks_from_policies_task.delay")
|
def test_delete_policy(self, generate_agent_checks_task):
|
||||||
def test_delete_policy(self, mock_tasks_task, mock_checks_task):
|
|
||||||
# returns 404 for invalid policy pk
|
# returns 404 for invalid policy pk
|
||||||
resp = self.client.delete("/automation/policies/500/", format="json")
|
resp = self.client.delete("/automation/policies/500/", format="json")
|
||||||
self.assertEqual(resp.status_code, 404)
|
self.assertEqual(resp.status_code, 404)
|
||||||
|
|
||||||
|
# setup data
|
||||||
policy = baker.make("automation.Policy")
|
policy = baker.make("automation.Policy")
|
||||||
|
site = baker.make("clients.Site")
|
||||||
|
agents = baker.make_recipe(
|
||||||
|
"agents.agent", site=site, policy=policy, _quantity=3
|
||||||
|
)
|
||||||
url = f"/automation/policies/{policy.pk}/"
|
url = f"/automation/policies/{policy.pk}/"
|
||||||
|
|
||||||
resp = self.client.delete(url, format="json")
|
resp = self.client.delete(url, format="json")
|
||||||
self.assertEqual(resp.status_code, 200)
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
mock_checks_task.assert_called_with(policypk=policy.pk, clear=True)
|
generate_agent_checks_task.assert_called_with(
|
||||||
mock_tasks_task.assert_called_with(policypk=policy.pk, clear=True)
|
[agent.pk for agent in agents], create_tasks=True
|
||||||
|
)
|
||||||
|
|
||||||
self.check_not_authenticated("delete", url)
|
self.check_not_authenticated("delete", url)
|
||||||
|
|
||||||
def test_get_all_policy_tasks(self):
|
def test_get_all_policy_tasks(self):
|
||||||
# returns 404 for invalid policy pk
|
|
||||||
resp = self.client.get("/automation/500/policyautomatedtasks/", format="json")
|
|
||||||
self.assertEqual(resp.status_code, 404)
|
|
||||||
|
|
||||||
# create policy with tasks
|
# create policy with tasks
|
||||||
policy = baker.make("automation.Policy")
|
policy = baker.make("automation.Policy")
|
||||||
baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||||
url = f"/automation/{policy.pk}/policyautomatedtasks/"
|
url = f"/automation/{policy.pk}/policyautomatedtasks/"
|
||||||
|
|
||||||
resp = self.client.get(url, format="json")
|
resp = self.client.get(url, format="json")
|
||||||
serializer = AutoTaskPolicySerializer(policy)
|
serializer = AutoTasksFieldSerializer(tasks, many=True)
|
||||||
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
self.assertEqual(resp.status_code, 200)
|
||||||
self.assertEqual(resp.data, serializer.data)
|
self.assertEqual(resp.data, serializer.data)
|
||||||
@@ -182,8 +182,9 @@ class TestPolicyViews(TacticalTestCase):
|
|||||||
self.check_not_authenticated("get", url)
|
self.check_not_authenticated("get", url)
|
||||||
|
|
||||||
def test_get_policy_check_status(self):
|
def test_get_policy_check_status(self):
|
||||||
# set data
|
# setup data
|
||||||
agent = baker.make_recipe("agents.agent")
|
site = baker.make("clients.Site")
|
||||||
|
agent = baker.make_recipe("agents.agent", site=site)
|
||||||
policy = baker.make("automation.Policy")
|
policy = baker.make("automation.Policy")
|
||||||
policy_diskcheck = baker.make_recipe("checks.diskspace_check", policy=policy)
|
policy_diskcheck = baker.make_recipe("checks.diskspace_check", policy=policy)
|
||||||
managed_check = baker.make_recipe(
|
managed_check = baker.make_recipe(
|
||||||
@@ -248,274 +249,6 @@ class TestPolicyViews(TacticalTestCase):
|
|||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
self.check_not_authenticated("get", url)
|
||||||
|
|
||||||
@patch("agents.models.Agent.generate_checks_from_policies")
|
|
||||||
@patch("automation.tasks.generate_agent_checks_by_location_task.delay")
|
|
||||||
def test_update_policy_add(
|
|
||||||
self,
|
|
||||||
mock_checks_location_task,
|
|
||||||
mock_checks_task,
|
|
||||||
):
|
|
||||||
url = f"/automation/related/"
|
|
||||||
|
|
||||||
# data setup
|
|
||||||
policy = baker.make("automation.Policy")
|
|
||||||
client = baker.make("clients.Client")
|
|
||||||
site = baker.make("clients.Site", client=client)
|
|
||||||
agent = baker.make_recipe("agents.agent", site=site)
|
|
||||||
|
|
||||||
# test add client to policy data
|
|
||||||
client_server_payload = {
|
|
||||||
"type": "client",
|
|
||||||
"pk": agent.client.pk,
|
|
||||||
"server_policy": policy.pk,
|
|
||||||
}
|
|
||||||
client_workstation_payload = {
|
|
||||||
"type": "client",
|
|
||||||
"pk": agent.client.pk,
|
|
||||||
"workstation_policy": policy.pk,
|
|
||||||
}
|
|
||||||
|
|
||||||
# test add site to policy data
|
|
||||||
site_server_payload = {
|
|
||||||
"type": "site",
|
|
||||||
"pk": agent.site.pk,
|
|
||||||
"server_policy": policy.pk,
|
|
||||||
}
|
|
||||||
site_workstation_payload = {
|
|
||||||
"type": "site",
|
|
||||||
"pk": agent.site.pk,
|
|
||||||
"workstation_policy": policy.pk,
|
|
||||||
}
|
|
||||||
|
|
||||||
# test add agent to policy data
|
|
||||||
agent_payload = {"type": "agent", "pk": agent.pk, "policy": policy.pk}
|
|
||||||
|
|
||||||
# test client server policy add
|
|
||||||
resp = self.client.post(url, client_server_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
# called because the relation changed
|
|
||||||
mock_checks_location_task.assert_called_with(
|
|
||||||
location={"site__client_id": client.id},
|
|
||||||
mon_type="server",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
mock_checks_location_task.reset_mock()
|
|
||||||
|
|
||||||
# test client workstation policy add
|
|
||||||
resp = self.client.post(url, client_workstation_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
# called because the relation changed
|
|
||||||
mock_checks_location_task.assert_called_with(
|
|
||||||
location={"site__client_id": client.id},
|
|
||||||
mon_type="workstation",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
mock_checks_location_task.reset_mock()
|
|
||||||
|
|
||||||
# test site add server policy
|
|
||||||
resp = self.client.post(url, site_server_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
# called because the relation changed
|
|
||||||
mock_checks_location_task.assert_called_with(
|
|
||||||
location={"site_id": site.id},
|
|
||||||
mon_type="server",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
mock_checks_location_task.reset_mock()
|
|
||||||
|
|
||||||
# test site add workstation policy
|
|
||||||
resp = self.client.post(url, site_workstation_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
# called because the relation changed
|
|
||||||
mock_checks_location_task.assert_called_with(
|
|
||||||
location={"site_id": site.id},
|
|
||||||
mon_type="workstation",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
mock_checks_location_task.reset_mock()
|
|
||||||
|
|
||||||
# test agent add
|
|
||||||
resp = self.client.post(url, agent_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
# called because the relation changed
|
|
||||||
mock_checks_task.assert_called_with(clear=True)
|
|
||||||
mock_checks_task.reset_mock()
|
|
||||||
|
|
||||||
# Adding the same relations shouldn't trigger mocks
|
|
||||||
resp = self.client.post(url, client_server_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
resp = self.client.post(url, client_workstation_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
mock_checks_location_task.assert_not_called()
|
|
||||||
|
|
||||||
resp = self.client.post(url, site_server_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
resp = self.client.post(url, site_workstation_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
mock_checks_location_task.assert_not_called()
|
|
||||||
|
|
||||||
resp = self.client.post(url, agent_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
# called because the relation changed
|
|
||||||
mock_checks_task.assert_not_called()
|
|
||||||
|
|
||||||
# test remove client from policy data
|
|
||||||
client_server_payload = {"type": "client", "pk": client.pk, "server_policy": 0}
|
|
||||||
client_workstation_payload = {
|
|
||||||
"type": "client",
|
|
||||||
"pk": client.pk,
|
|
||||||
"workstation_policy": 0,
|
|
||||||
}
|
|
||||||
|
|
||||||
# test remove site from policy data
|
|
||||||
site_server_payload = {"type": "site", "pk": site.pk, "server_policy": 0}
|
|
||||||
site_workstation_payload = {
|
|
||||||
"type": "site",
|
|
||||||
"pk": site.pk,
|
|
||||||
"workstation_policy": 0,
|
|
||||||
}
|
|
||||||
|
|
||||||
# test remove agent from policy
|
|
||||||
agent_payload = {"type": "agent", "pk": agent.pk, "policy": 0}
|
|
||||||
|
|
||||||
# test client server policy remove
|
|
||||||
resp = self.client.post(url, client_server_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
# called because the relation changed
|
|
||||||
mock_checks_location_task.assert_called_with(
|
|
||||||
location={"site__client_id": client.id},
|
|
||||||
mon_type="server",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
mock_checks_location_task.reset_mock()
|
|
||||||
|
|
||||||
# test client workstation policy remove
|
|
||||||
resp = self.client.post(url, client_workstation_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
# called because the relation changed
|
|
||||||
mock_checks_location_task.assert_called_with(
|
|
||||||
location={"site__client_id": client.id},
|
|
||||||
mon_type="workstation",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
mock_checks_location_task.reset_mock()
|
|
||||||
|
|
||||||
# test site remove server policy
|
|
||||||
resp = self.client.post(url, site_server_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
# called because the relation changed
|
|
||||||
mock_checks_location_task.assert_called_with(
|
|
||||||
location={"site_id": site.id},
|
|
||||||
mon_type="server",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
mock_checks_location_task.reset_mock()
|
|
||||||
|
|
||||||
# test site remove workstation policy
|
|
||||||
resp = self.client.post(url, site_workstation_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
# called because the relation changed
|
|
||||||
mock_checks_location_task.assert_called_with(
|
|
||||||
location={"site_id": site.id},
|
|
||||||
mon_type="workstation",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
mock_checks_location_task.reset_mock()
|
|
||||||
|
|
||||||
# test agent remove
|
|
||||||
resp = self.client.post(url, agent_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
# called because the relation changed
|
|
||||||
mock_checks_task.assert_called_with(clear=True)
|
|
||||||
mock_checks_task.reset_mock()
|
|
||||||
|
|
||||||
# adding the same relations shouldn't trigger mocks
|
|
||||||
resp = self.client.post(url, client_server_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
resp = self.client.post(url, client_workstation_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
# shouldn't be called since nothing changed
|
|
||||||
mock_checks_location_task.assert_not_called()
|
|
||||||
|
|
||||||
resp = self.client.post(url, site_server_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
resp = self.client.post(url, site_workstation_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
# shouldn't be called since nothing changed
|
|
||||||
mock_checks_location_task.assert_not_called()
|
|
||||||
|
|
||||||
resp = self.client.post(url, agent_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
|
|
||||||
# shouldn't be called since nothing changed
|
|
||||||
mock_checks_task.assert_not_called()
|
|
||||||
|
|
||||||
self.check_not_authenticated("post", url)
|
|
||||||
|
|
||||||
def test_get_relation_by_type(self):
|
|
||||||
url = f"/automation/related/"
|
|
||||||
|
|
||||||
# data setup
|
|
||||||
policy = baker.make("automation.Policy")
|
|
||||||
client = baker.make("clients.Client", workstation_policy=policy)
|
|
||||||
site = baker.make("clients.Site", server_policy=policy)
|
|
||||||
agent = baker.make_recipe("agents.agent", site=site, policy=policy)
|
|
||||||
|
|
||||||
client_payload = {"type": "client", "pk": client.pk}
|
|
||||||
|
|
||||||
# test add site to policy
|
|
||||||
site_payload = {"type": "site", "pk": site.pk}
|
|
||||||
|
|
||||||
# test add agent to policy
|
|
||||||
agent_payload = {"type": "agent", "pk": agent.pk}
|
|
||||||
|
|
||||||
# test client relation get
|
|
||||||
serializer = RelatedClientPolicySerializer(client)
|
|
||||||
resp = self.client.patch(url, client_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
self.assertEqual(resp.data, serializer.data)
|
|
||||||
|
|
||||||
# test site relation get
|
|
||||||
serializer = RelatedSitePolicySerializer(site)
|
|
||||||
resp = self.client.patch(url, site_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
self.assertEqual(resp.data, serializer.data)
|
|
||||||
|
|
||||||
# test agent relation get
|
|
||||||
serializer = RelatedAgentPolicySerializer(agent)
|
|
||||||
resp = self.client.patch(url, agent_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 200)
|
|
||||||
self.assertEqual(resp.data, serializer.data)
|
|
||||||
|
|
||||||
invalid_payload = {"type": "bad_type", "pk": 5}
|
|
||||||
|
|
||||||
resp = self.client.patch(url, invalid_payload, format="json")
|
|
||||||
self.assertEqual(resp.status_code, 400)
|
|
||||||
|
|
||||||
self.check_not_authenticated("patch", url)
|
|
||||||
|
|
||||||
def test_get_policy_task_status(self):
|
def test_get_policy_task_status(self):
|
||||||
|
|
||||||
# policy with a task
|
# policy with a task
|
||||||
@@ -749,11 +482,10 @@ class TestPolicyTasks(TacticalTestCase):
|
|||||||
# setup data
|
# setup data
|
||||||
policy = baker.make("automation.Policy", active=True)
|
policy = baker.make("automation.Policy", active=True)
|
||||||
checks = self.create_checks(policy=policy)
|
checks = self.create_checks(policy=policy)
|
||||||
site = baker.make("clients.Site")
|
agent = baker.make_recipe("agents.agent", policy=policy)
|
||||||
agent = baker.make_recipe("agents.agent", site=site, policy=policy)
|
|
||||||
|
|
||||||
# test policy assigned to agent
|
# test policy assigned to agent
|
||||||
generate_agent_checks_from_policies_task(policy.id, clear=True)
|
generate_agent_checks_from_policies_task(policy.id)
|
||||||
|
|
||||||
# make sure all checks were created. should be 7
|
# make sure all checks were created. should be 7
|
||||||
agent_checks = Agent.objects.get(pk=agent.id).agentchecks.all()
|
agent_checks = Agent.objects.get(pk=agent.id).agentchecks.all()
|
||||||
@@ -766,16 +498,19 @@ class TestPolicyTasks(TacticalTestCase):
|
|||||||
if check.check_type == "diskspace":
|
if check.check_type == "diskspace":
|
||||||
self.assertEqual(check.parent_check, checks[0].id)
|
self.assertEqual(check.parent_check, checks[0].id)
|
||||||
self.assertEqual(check.disk, checks[0].disk)
|
self.assertEqual(check.disk, checks[0].disk)
|
||||||
self.assertEqual(check.threshold, checks[0].threshold)
|
self.assertEqual(check.error_threshold, checks[0].error_threshold)
|
||||||
|
self.assertEqual(check.warning_threshold, checks[0].warning_threshold)
|
||||||
elif check.check_type == "ping":
|
elif check.check_type == "ping":
|
||||||
self.assertEqual(check.parent_check, checks[1].id)
|
self.assertEqual(check.parent_check, checks[1].id)
|
||||||
self.assertEqual(check.ip, checks[1].ip)
|
self.assertEqual(check.ip, checks[1].ip)
|
||||||
elif check.check_type == "cpuload":
|
elif check.check_type == "cpuload":
|
||||||
self.assertEqual(check.parent_check, checks[2].id)
|
self.assertEqual(check.parent_check, checks[2].id)
|
||||||
self.assertEqual(check.threshold, checks[2].threshold)
|
self.assertEqual(check.error_threshold, checks[2].error_threshold)
|
||||||
|
self.assertEqual(check.warning_threshold, checks[2].warning_threshold)
|
||||||
elif check.check_type == "memory":
|
elif check.check_type == "memory":
|
||||||
self.assertEqual(check.parent_check, checks[3].id)
|
self.assertEqual(check.parent_check, checks[3].id)
|
||||||
self.assertEqual(check.threshold, checks[3].threshold)
|
self.assertEqual(check.error_threshold, checks[3].error_threshold)
|
||||||
|
self.assertEqual(check.warning_threshold, checks[3].warning_threshold)
|
||||||
elif check.check_type == "winsvc":
|
elif check.check_type == "winsvc":
|
||||||
self.assertEqual(check.parent_check, checks[4].id)
|
self.assertEqual(check.parent_check, checks[4].id)
|
||||||
self.assertEqual(check.svc_name, checks[4].svc_name)
|
self.assertEqual(check.svc_name, checks[4].svc_name)
|
||||||
@@ -811,71 +546,246 @@ class TestPolicyTasks(TacticalTestCase):
|
|||||||
7,
|
7,
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_generating_agent_policy_checks_by_location(self):
|
@patch("automation.tasks.generate_agent_checks_by_location_task.delay")
|
||||||
from .tasks import generate_agent_checks_by_location_task
|
def test_generating_agent_policy_checks_by_location(
|
||||||
|
self, generate_agent_checks_by_location_task
|
||||||
|
):
|
||||||
|
from automation.tasks import (
|
||||||
|
generate_agent_checks_by_location_task as generate_agent_checks,
|
||||||
|
)
|
||||||
|
|
||||||
# setup data
|
# setup data
|
||||||
policy = baker.make("automation.Policy", active=True)
|
policy = baker.make("automation.Policy", active=True)
|
||||||
self.create_checks(policy=policy)
|
self.create_checks(policy=policy)
|
||||||
clients = baker.make(
|
|
||||||
"clients.Client",
|
|
||||||
_quantity=2,
|
|
||||||
server_policy=policy,
|
|
||||||
workstation_policy=policy,
|
|
||||||
)
|
|
||||||
sites = baker.make("clients.Site", client=cycle(clients), _quantity=4)
|
|
||||||
server_agent = baker.make_recipe("agents.server_agent", site=sites[0])
|
|
||||||
workstation_agent = baker.make_recipe("agents.workstation_agent", site=sites[2])
|
|
||||||
agent1 = baker.make_recipe("agents.server_agent", site=sites[1])
|
|
||||||
agent2 = baker.make_recipe("agents.workstation_agent", site=sites[3])
|
|
||||||
|
|
||||||
generate_agent_checks_by_location_task(
|
baker.make(
|
||||||
{"site_id": sites[0].id},
|
"autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3
|
||||||
"server",
|
)
|
||||||
clear=True,
|
|
||||||
|
server_agent = baker.make_recipe("agents.server_agent")
|
||||||
|
workstation_agent = baker.make_recipe("agents.workstation_agent")
|
||||||
|
|
||||||
|
# no checks should be preset on agents
|
||||||
|
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||||
|
self.assertEqual(
|
||||||
|
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||||
|
)
|
||||||
|
|
||||||
|
# set workstation policy on client and policy checks should be there
|
||||||
|
workstation_agent.client.workstation_policy = policy
|
||||||
|
workstation_agent.client.save()
|
||||||
|
|
||||||
|
# should trigger task in save method on core
|
||||||
|
generate_agent_checks_by_location_task.assert_called_with(
|
||||||
|
location={"site__client_id": workstation_agent.client.pk},
|
||||||
|
mon_type="workstation",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
generate_agent_checks_by_location_task.reset_mock()
|
||||||
|
|
||||||
|
generate_agent_checks(
|
||||||
|
location={"site__client_id": workstation_agent.client.pk},
|
||||||
|
mon_type="workstation",
|
||||||
create_tasks=True,
|
create_tasks=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
# server_agent should have policy checks and the other agents should not
|
# make sure the checks were added
|
||||||
|
self.assertEqual(
|
||||||
|
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 7
|
||||||
|
)
|
||||||
|
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||||
|
|
||||||
|
# remove workstation policy from client
|
||||||
|
workstation_agent.client.workstation_policy = None
|
||||||
|
workstation_agent.client.save()
|
||||||
|
|
||||||
|
# should trigger task in save method on core
|
||||||
|
generate_agent_checks_by_location_task.assert_called_with(
|
||||||
|
location={"site__client_id": workstation_agent.client.pk},
|
||||||
|
mon_type="workstation",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
generate_agent_checks_by_location_task.reset_mock()
|
||||||
|
|
||||||
|
generate_agent_checks(
|
||||||
|
location={"site__client_id": workstation_agent.client.pk},
|
||||||
|
mon_type="workstation",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# make sure the checks were removed
|
||||||
|
self.assertEqual(
|
||||||
|
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||||
|
)
|
||||||
|
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||||
|
|
||||||
|
# set server policy on client and policy checks should be there
|
||||||
|
server_agent.client.server_policy = policy
|
||||||
|
server_agent.client.save()
|
||||||
|
|
||||||
|
# should trigger task in save method on core
|
||||||
|
generate_agent_checks_by_location_task.assert_called_with(
|
||||||
|
location={"site__client_id": server_agent.client.pk},
|
||||||
|
mon_type="server",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
generate_agent_checks_by_location_task.reset_mock()
|
||||||
|
|
||||||
|
generate_agent_checks(
|
||||||
|
location={"site__client_id": server_agent.client.pk},
|
||||||
|
mon_type="server",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# make sure checks were added
|
||||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7)
|
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||||
)
|
)
|
||||||
self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 0)
|
|
||||||
|
|
||||||
generate_agent_checks_by_location_task(
|
# remove server policy from client
|
||||||
{"site__client_id": clients[0].id},
|
server_agent.client.server_policy = None
|
||||||
"workstation",
|
server_agent.client.save()
|
||||||
clear=True,
|
|
||||||
|
# should trigger task in save method on core
|
||||||
|
generate_agent_checks_by_location_task.assert_called_with(
|
||||||
|
location={"site__client_id": server_agent.client.pk},
|
||||||
|
mon_type="server",
|
||||||
create_tasks=True,
|
create_tasks=True,
|
||||||
)
|
)
|
||||||
# workstation_agent should now have policy checks and the other agents should not
|
generate_agent_checks_by_location_task.reset_mock()
|
||||||
|
|
||||||
|
generate_agent_checks(
|
||||||
|
location={"site__client_id": server_agent.client.pk},
|
||||||
|
mon_type="server",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# make sure checks were removed
|
||||||
|
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||||
|
self.assertEqual(
|
||||||
|
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||||
|
)
|
||||||
|
|
||||||
|
# set workstation policy on site and policy checks should be there
|
||||||
|
workstation_agent.site.workstation_policy = policy
|
||||||
|
workstation_agent.site.save()
|
||||||
|
|
||||||
|
# should trigger task in save method on core
|
||||||
|
generate_agent_checks_by_location_task.assert_called_with(
|
||||||
|
location={"site_id": workstation_agent.site.pk},
|
||||||
|
mon_type="workstation",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
generate_agent_checks_by_location_task.reset_mock()
|
||||||
|
|
||||||
|
generate_agent_checks(
|
||||||
|
location={"site_id": workstation_agent.site.pk},
|
||||||
|
mon_type="workstation",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# make sure checks were added on workstation
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 7
|
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 7
|
||||||
)
|
)
|
||||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7)
|
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||||
self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 0)
|
|
||||||
self.assertEqual(Agent.objects.get(pk=agent2.id).agentchecks.count(), 0)
|
|
||||||
|
|
||||||
def test_generating_policy_checks_for_all_agents(self):
|
# remove workstation policy from site
|
||||||
from .tasks import generate_all_agent_checks_task
|
workstation_agent.site.workstation_policy = None
|
||||||
|
workstation_agent.site.save()
|
||||||
|
|
||||||
|
# should trigger task in save method on core
|
||||||
|
generate_agent_checks_by_location_task.assert_called_with(
|
||||||
|
location={"site_id": workstation_agent.site.pk},
|
||||||
|
mon_type="workstation",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
generate_agent_checks_by_location_task.reset_mock()
|
||||||
|
|
||||||
|
generate_agent_checks(
|
||||||
|
location={"site_id": workstation_agent.site.pk},
|
||||||
|
mon_type="workstation",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# make sure checks were removed
|
||||||
|
self.assertEqual(
|
||||||
|
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||||
|
)
|
||||||
|
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||||
|
|
||||||
|
# set server policy on site and policy checks should be there
|
||||||
|
server_agent.site.server_policy = policy
|
||||||
|
server_agent.site.save()
|
||||||
|
|
||||||
|
# should trigger task in save method on core
|
||||||
|
generate_agent_checks_by_location_task.assert_called_with(
|
||||||
|
location={"site_id": server_agent.site.pk},
|
||||||
|
mon_type="server",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
generate_agent_checks_by_location_task.reset_mock()
|
||||||
|
|
||||||
|
generate_agent_checks(
|
||||||
|
location={"site_id": server_agent.site.pk},
|
||||||
|
mon_type="server",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# make sure checks were added
|
||||||
|
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7)
|
||||||
|
self.assertEqual(
|
||||||
|
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||||
|
)
|
||||||
|
|
||||||
|
# remove server policy from site
|
||||||
|
server_agent.site.server_policy = None
|
||||||
|
server_agent.site.save()
|
||||||
|
|
||||||
|
# should trigger task in save method on core
|
||||||
|
generate_agent_checks_by_location_task.assert_called_with(
|
||||||
|
location={"site_id": server_agent.site.pk},
|
||||||
|
mon_type="server",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
generate_agent_checks_by_location_task.reset_mock()
|
||||||
|
|
||||||
|
generate_agent_checks(
|
||||||
|
location={"site_id": server_agent.site.pk},
|
||||||
|
mon_type="server",
|
||||||
|
create_tasks=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# make sure checks were removed
|
||||||
|
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||||
|
self.assertEqual(
|
||||||
|
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||||
|
)
|
||||||
|
|
||||||
|
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||||
|
def test_generating_policy_checks_for_all_agents(
|
||||||
|
self, generate_all_agent_checks_task
|
||||||
|
):
|
||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
|
|
||||||
|
from .tasks import generate_all_agent_checks_task as generate_all_checks
|
||||||
|
|
||||||
# setup data
|
# setup data
|
||||||
policy = baker.make("automation.Policy", active=True)
|
policy = baker.make("automation.Policy", active=True)
|
||||||
self.create_checks(policy=policy)
|
self.create_checks(policy=policy)
|
||||||
|
|
||||||
site = baker.make("clients.Site")
|
server_agents = baker.make_recipe("agents.server_agent", _quantity=3)
|
||||||
server_agents = baker.make_recipe("agents.server_agent", site=site, _quantity=3)
|
workstation_agents = baker.make_recipe("agents.workstation_agent", _quantity=4)
|
||||||
workstation_agents = baker.make_recipe(
|
|
||||||
"agents.workstation_agent", site=site, _quantity=4
|
|
||||||
)
|
|
||||||
core = CoreSettings.objects.first()
|
core = CoreSettings.objects.first()
|
||||||
core.server_policy = policy
|
core.server_policy = policy
|
||||||
core.workstation_policy = policy
|
|
||||||
core.save()
|
core.save()
|
||||||
|
|
||||||
generate_all_agent_checks_task("server", clear=True, create_tasks=True)
|
generate_all_agent_checks_task.assert_called_with(
|
||||||
|
mon_type="server", create_tasks=True
|
||||||
|
)
|
||||||
|
generate_all_agent_checks_task.reset_mock()
|
||||||
|
generate_all_checks(mon_type="server", create_tasks=True)
|
||||||
|
|
||||||
# all servers should have 7 checks
|
# all servers should have 7 checks
|
||||||
for agent in server_agents:
|
for agent in server_agents:
|
||||||
@@ -884,24 +794,50 @@ class TestPolicyTasks(TacticalTestCase):
|
|||||||
for agent in workstation_agents:
|
for agent in workstation_agents:
|
||||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)
|
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)
|
||||||
|
|
||||||
generate_all_agent_checks_task("workstation", clear=True, create_tasks=True)
|
core.server_policy = None
|
||||||
|
core.workstation_policy = policy
|
||||||
|
core.save()
|
||||||
|
|
||||||
# all agents should have 7 checks now
|
generate_all_agent_checks_task.assert_any_call(
|
||||||
|
mon_type="workstation", create_tasks=True
|
||||||
|
)
|
||||||
|
generate_all_agent_checks_task.assert_any_call(
|
||||||
|
mon_type="server", create_tasks=True
|
||||||
|
)
|
||||||
|
generate_all_agent_checks_task.reset_mock()
|
||||||
|
generate_all_checks(mon_type="server", create_tasks=True)
|
||||||
|
generate_all_checks(mon_type="workstation", create_tasks=True)
|
||||||
|
|
||||||
|
# all workstations should have 7 checks
|
||||||
for agent in server_agents:
|
for agent in server_agents:
|
||||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)
|
||||||
|
|
||||||
for agent in workstation_agents:
|
for agent in workstation_agents:
|
||||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
||||||
|
|
||||||
|
core.workstation_policy = None
|
||||||
|
core.save()
|
||||||
|
|
||||||
|
generate_all_agent_checks_task.assert_called_with(
|
||||||
|
mon_type="workstation", create_tasks=True
|
||||||
|
)
|
||||||
|
generate_all_agent_checks_task.reset_mock()
|
||||||
|
generate_all_checks(mon_type="workstation", create_tasks=True)
|
||||||
|
|
||||||
|
# nothing should have the checks
|
||||||
|
for agent in server_agents:
|
||||||
|
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)
|
||||||
|
|
||||||
|
for agent in workstation_agents:
|
||||||
|
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)
|
||||||
|
|
||||||
def test_delete_policy_check(self):
|
def test_delete_policy_check(self):
|
||||||
from .tasks import delete_policy_check_task
|
|
||||||
from .models import Policy
|
from .models import Policy
|
||||||
|
from .tasks import delete_policy_check_task
|
||||||
|
|
||||||
policy = baker.make("automation.Policy", active=True)
|
policy = baker.make("automation.Policy", active=True)
|
||||||
self.create_checks(policy=policy)
|
self.create_checks(policy=policy)
|
||||||
site = baker.make("clients.Site")
|
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||||
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
|
|
||||||
agent.generate_checks_from_policies()
|
|
||||||
|
|
||||||
# make sure agent has 7 checks
|
# make sure agent has 7 checks
|
||||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
||||||
@@ -920,13 +856,12 @@ class TestPolicyTasks(TacticalTestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def update_policy_check_fields(self):
|
def update_policy_check_fields(self):
|
||||||
from .tasks import update_policy_check_fields_task
|
|
||||||
from .models import Policy
|
from .models import Policy
|
||||||
|
from .tasks import update_policy_check_fields_task
|
||||||
|
|
||||||
policy = baker.make("automation.Policy", active=True)
|
policy = baker.make("automation.Policy", active=True)
|
||||||
self.create_checks(policy=policy)
|
self.create_checks(policy=policy)
|
||||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||||
agent.generate_checks_from_policies()
|
|
||||||
|
|
||||||
# make sure agent has 7 checks
|
# make sure agent has 7 checks
|
||||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
||||||
@@ -958,10 +893,9 @@ class TestPolicyTasks(TacticalTestCase):
|
|||||||
tasks = baker.make(
|
tasks = baker.make(
|
||||||
"autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3
|
"autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3
|
||||||
)
|
)
|
||||||
site = baker.make("clients.Site")
|
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||||
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
|
|
||||||
|
|
||||||
generate_agent_tasks_from_policies_task(policy.id, clear=True)
|
generate_agent_tasks_from_policies_task(policy.id)
|
||||||
|
|
||||||
agent_tasks = Agent.objects.get(pk=agent.id).autotasks.all()
|
agent_tasks = Agent.objects.get(pk=agent.id).autotasks.all()
|
||||||
|
|
||||||
@@ -980,63 +914,19 @@ class TestPolicyTasks(TacticalTestCase):
|
|||||||
self.assertEqual(task.parent_task, tasks[2].id)
|
self.assertEqual(task.parent_task, tasks[2].id)
|
||||||
self.assertEqual(task.name, tasks[2].name)
|
self.assertEqual(task.name, tasks[2].name)
|
||||||
|
|
||||||
def test_generate_agent_tasks_by_location(self):
|
|
||||||
from .tasks import generate_agent_tasks_by_location_task
|
|
||||||
|
|
||||||
# setup data
|
|
||||||
policy = baker.make("automation.Policy", active=True)
|
|
||||||
baker.make(
|
|
||||||
"autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3
|
|
||||||
)
|
|
||||||
clients = baker.make(
|
|
||||||
"clients.Client",
|
|
||||||
_quantity=2,
|
|
||||||
server_policy=policy,
|
|
||||||
workstation_policy=policy,
|
|
||||||
)
|
|
||||||
sites = baker.make("clients.Site", client=cycle(clients), _quantity=4)
|
|
||||||
server_agent = baker.make_recipe("agents.server_agent", site=sites[0])
|
|
||||||
workstation_agent = baker.make_recipe("agents.workstation_agent", site=sites[2])
|
|
||||||
agent1 = baker.make_recipe("agents.agent", site=sites[1])
|
|
||||||
agent2 = baker.make_recipe("agents.agent", site=sites[3])
|
|
||||||
|
|
||||||
generate_agent_tasks_by_location_task(
|
|
||||||
{"site_id": sites[0].id}, "server", clear=True
|
|
||||||
)
|
|
||||||
|
|
||||||
# all servers in site1 and site2 should have 3 tasks
|
|
||||||
self.assertEqual(
|
|
||||||
Agent.objects.get(pk=workstation_agent.id).autotasks.count(), 0
|
|
||||||
)
|
|
||||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).autotasks.count(), 3)
|
|
||||||
self.assertEqual(Agent.objects.get(pk=agent1.id).autotasks.count(), 0)
|
|
||||||
self.assertEqual(Agent.objects.get(pk=agent2.id).autotasks.count(), 0)
|
|
||||||
|
|
||||||
generate_agent_tasks_by_location_task(
|
|
||||||
{"site__client_id": clients[0].id}, "workstation", clear=True
|
|
||||||
)
|
|
||||||
|
|
||||||
# all workstations in Default1 should have 3 tasks
|
|
||||||
self.assertEqual(
|
|
||||||
Agent.objects.get(pk=workstation_agent.id).autotasks.count(), 3
|
|
||||||
)
|
|
||||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).autotasks.count(), 3)
|
|
||||||
self.assertEqual(Agent.objects.get(pk=agent1.id).autotasks.count(), 0)
|
|
||||||
self.assertEqual(Agent.objects.get(pk=agent2.id).autotasks.count(), 0)
|
|
||||||
|
|
||||||
@patch("autotasks.tasks.delete_win_task_schedule.delay")
|
@patch("autotasks.tasks.delete_win_task_schedule.delay")
|
||||||
def test_delete_policy_tasks(self, delete_win_task_schedule):
|
def test_delete_policy_tasks(self, delete_win_task_schedule):
|
||||||
from .tasks import delete_policy_autotask_task
|
from .tasks import delete_policy_autotask_task
|
||||||
|
|
||||||
policy = baker.make("automation.Policy", active=True)
|
policy = baker.make("automation.Policy", active=True)
|
||||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||||
site = baker.make("clients.Site")
|
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||||
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
|
|
||||||
agent.generate_tasks_from_policies()
|
|
||||||
|
|
||||||
delete_policy_autotask_task(tasks[0].id)
|
delete_policy_autotask_task(tasks[0].id)
|
||||||
|
|
||||||
delete_win_task_schedule.assert_called_with(agent.autotasks.first().id)
|
delete_win_task_schedule.assert_called_with(
|
||||||
|
agent.autotasks.get(parent_task=tasks[0].id).id
|
||||||
|
)
|
||||||
|
|
||||||
@patch("autotasks.tasks.run_win_task.delay")
|
@patch("autotasks.tasks.run_win_task.delay")
|
||||||
def test_run_policy_task(self, run_win_task):
|
def test_run_policy_task(self, run_win_task):
|
||||||
@@ -1051,25 +941,46 @@ class TestPolicyTasks(TacticalTestCase):
|
|||||||
for task in tasks:
|
for task in tasks:
|
||||||
run_win_task.assert_any_call(task.id)
|
run_win_task.assert_any_call(task.id)
|
||||||
|
|
||||||
@patch("agents.models.Agent.nats_cmd")
|
@patch("autotasks.tasks.enable_or_disable_win_task.delay")
|
||||||
def test_update_policy_tasks(self, nats_cmd):
|
def test_update_policy_tasks(self, enable_or_disable_win_task):
|
||||||
from .tasks import update_policy_task_fields_task
|
from .tasks import update_policy_task_fields_task
|
||||||
from autotasks.models import AutomatedTask
|
|
||||||
|
|
||||||
nats_cmd.return_value = "ok"
|
|
||||||
|
|
||||||
# setup data
|
# setup data
|
||||||
policy = baker.make("automation.Policy", active=True)
|
policy = baker.make("automation.Policy", active=True)
|
||||||
tasks = baker.make(
|
tasks = baker.make(
|
||||||
"autotasks.AutomatedTask", enabled=True, policy=policy, _quantity=3
|
"autotasks.AutomatedTask", enabled=True, policy=policy, _quantity=3
|
||||||
)
|
)
|
||||||
site = baker.make("clients.Site")
|
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||||
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
|
|
||||||
agent.generate_tasks_from_policies()
|
|
||||||
|
|
||||||
tasks[0].enabled = False
|
tasks[0].enabled = False
|
||||||
tasks[0].save()
|
tasks[0].save()
|
||||||
|
|
||||||
update_policy_task_fields_task(tasks[0].id, enabled=False)
|
update_policy_task_fields_task(tasks[0].id)
|
||||||
|
enable_or_disable_win_task.assert_not_called()
|
||||||
|
|
||||||
self.assertFalse(AutomatedTask.objects.get(parent_task=tasks[0].id).enabled)
|
self.assertFalse(agent.autotasks.get(parent_task=tasks[0].id).enabled)
|
||||||
|
|
||||||
|
update_policy_task_fields_task(tasks[0].id, update_agent=True)
|
||||||
|
enable_or_disable_win_task.assert_called_with(
|
||||||
|
agent.autotasks.get(parent_task=tasks[0].id).id, False
|
||||||
|
)
|
||||||
|
|
||||||
|
@patch("agents.models.Agent.generate_tasks_from_policies")
|
||||||
|
@patch("agents.models.Agent.generate_checks_from_policies")
|
||||||
|
def test_generate_agent_checks_with_agentpks(self, generate_checks, generate_tasks):
|
||||||
|
from automation.tasks import generate_agent_checks_task
|
||||||
|
|
||||||
|
agents = baker.make_recipe("agents.agent", _quantity=5)
|
||||||
|
|
||||||
|
# reset because creating agents triggers it
|
||||||
|
generate_checks.reset_mock()
|
||||||
|
generate_tasks.reset_mock()
|
||||||
|
|
||||||
|
generate_agent_checks_task([agent.pk for agent in agents])
|
||||||
|
self.assertEquals(generate_checks.call_count, 5)
|
||||||
|
generate_tasks.assert_not_called()
|
||||||
|
generate_checks.reset_mock()
|
||||||
|
|
||||||
|
generate_agent_checks_task([agent.pk for agent in agents], create_tasks=True)
|
||||||
|
self.assertEquals(generate_checks.call_count, 5)
|
||||||
|
self.assertEquals(generate_checks.call_count, 5)
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from . import views
|
from . import views
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path("policies/", views.GetAddPolicies.as_view()),
|
path("policies/", views.GetAddPolicies.as_view()),
|
||||||
path("policies/<int:pk>/related/", views.GetRelated.as_view()),
|
path("policies/<int:pk>/related/", views.GetRelated.as_view()),
|
||||||
path("related/", views.GetRelated.as_view()),
|
|
||||||
path("policies/overview/", views.OverviewPolicy.as_view()),
|
path("policies/overview/", views.OverviewPolicy.as_view()),
|
||||||
path("policies/<int:pk>/", views.GetUpdateDeletePolicy.as_view()),
|
path("policies/<int:pk>/", views.GetUpdateDeletePolicy.as_view()),
|
||||||
path("<int:pk>/policychecks/", views.PolicyCheck.as_view()),
|
path("<int:pk>/policychecks/", views.PolicyCheck.as_view()),
|
||||||
|
|||||||
@@ -1,39 +1,27 @@
|
|||||||
from django.shortcuts import get_object_or_404
|
from django.shortcuts import get_object_or_404
|
||||||
|
|
||||||
from rest_framework.views import APIView
|
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework import status
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
from .models import Policy
|
|
||||||
from agents.models import Agent
|
from agents.models import Agent
|
||||||
from clients.models import Client, Site
|
|
||||||
from checks.models import Check
|
|
||||||
from autotasks.models import AutomatedTask
|
|
||||||
from winupdate.models import WinUpdatePolicy
|
|
||||||
|
|
||||||
from clients.serializers import ClientSerializer, SiteSerializer
|
|
||||||
from agents.serializers import AgentHostnameSerializer
|
from agents.serializers import AgentHostnameSerializer
|
||||||
|
from autotasks.models import AutomatedTask
|
||||||
|
from checks.models import Check
|
||||||
|
from clients.models import Client
|
||||||
|
from clients.serializers import ClientSerializer, SiteSerializer
|
||||||
|
from winupdate.models import WinUpdatePolicy
|
||||||
from winupdate.serializers import WinUpdatePolicySerializer
|
from winupdate.serializers import WinUpdatePolicySerializer
|
||||||
|
|
||||||
|
from .models import Policy
|
||||||
from .serializers import (
|
from .serializers import (
|
||||||
|
AutoTasksFieldSerializer,
|
||||||
|
PolicyCheckSerializer,
|
||||||
|
PolicyCheckStatusSerializer,
|
||||||
|
PolicyOverviewSerializer,
|
||||||
PolicySerializer,
|
PolicySerializer,
|
||||||
PolicyTableSerializer,
|
PolicyTableSerializer,
|
||||||
PolicyOverviewSerializer,
|
|
||||||
PolicyCheckStatusSerializer,
|
|
||||||
PolicyCheckSerializer,
|
|
||||||
PolicyTaskStatusSerializer,
|
PolicyTaskStatusSerializer,
|
||||||
AutoTaskPolicySerializer,
|
|
||||||
RelatedClientPolicySerializer,
|
|
||||||
RelatedSitePolicySerializer,
|
|
||||||
RelatedAgentPolicySerializer,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .tasks import (
|
|
||||||
generate_agent_checks_from_policies_task,
|
|
||||||
generate_agent_checks_by_location_task,
|
|
||||||
generate_agent_tasks_from_policies_task,
|
|
||||||
run_win_policy_autotask_task,
|
|
||||||
)
|
)
|
||||||
|
from .tasks import run_win_policy_autotask_task
|
||||||
|
|
||||||
|
|
||||||
class GetAddPolicies(APIView):
|
class GetAddPolicies(APIView):
|
||||||
@@ -72,30 +60,14 @@ class GetUpdateDeletePolicy(APIView):
|
|||||||
def put(self, request, pk):
|
def put(self, request, pk):
|
||||||
policy = get_object_or_404(Policy, pk=pk)
|
policy = get_object_or_404(Policy, pk=pk)
|
||||||
|
|
||||||
old_active = policy.active
|
|
||||||
old_enforced = policy.enforced
|
|
||||||
|
|
||||||
serializer = PolicySerializer(instance=policy, data=request.data, partial=True)
|
serializer = PolicySerializer(instance=policy, data=request.data, partial=True)
|
||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
saved_policy = serializer.save()
|
serializer.save()
|
||||||
|
|
||||||
# Generate agent checks only if active and enforced were changed
|
|
||||||
if saved_policy.active != old_active or saved_policy.enforced != old_enforced:
|
|
||||||
generate_agent_checks_from_policies_task.delay(
|
|
||||||
policypk=policy.pk,
|
|
||||||
clear=(not saved_policy.active or not saved_policy.enforced),
|
|
||||||
create_tasks=(saved_policy.active != old_active),
|
|
||||||
)
|
|
||||||
|
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
def delete(self, request, pk):
|
def delete(self, request, pk):
|
||||||
policy = get_object_or_404(Policy, pk=pk)
|
get_object_or_404(Policy, pk=pk).delete()
|
||||||
|
|
||||||
# delete all managed policy checks off of agents
|
|
||||||
generate_agent_checks_from_policies_task.delay(policypk=policy.pk, clear=True)
|
|
||||||
generate_agent_tasks_from_policies_task.delay(policypk=policy.pk, clear=True)
|
|
||||||
policy.delete()
|
|
||||||
|
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
@@ -104,8 +76,8 @@ class PolicyAutoTask(APIView):
|
|||||||
|
|
||||||
# tasks associated with policy
|
# tasks associated with policy
|
||||||
def get(self, request, pk):
|
def get(self, request, pk):
|
||||||
policy = get_object_or_404(Policy, pk=pk)
|
tasks = AutomatedTask.objects.filter(policy=pk)
|
||||||
return Response(AutoTaskPolicySerializer(policy).data)
|
return Response(AutoTasksFieldSerializer(tasks, many=True).data)
|
||||||
|
|
||||||
# get status of all tasks
|
# get status of all tasks
|
||||||
def patch(self, request, task):
|
def patch(self, request, task):
|
||||||
@@ -184,213 +156,12 @@ class GetRelated(APIView):
|
|||||||
).data
|
).data
|
||||||
|
|
||||||
response["agents"] = AgentHostnameSerializer(
|
response["agents"] = AgentHostnameSerializer(
|
||||||
policy.related_agents(),
|
policy.related_agents().only("pk", "hostname"),
|
||||||
many=True,
|
many=True,
|
||||||
).data
|
).data
|
||||||
|
|
||||||
return Response(response)
|
return Response(response)
|
||||||
|
|
||||||
# update agents, clients, sites to policy
|
|
||||||
def post(self, request):
|
|
||||||
|
|
||||||
related_type = request.data["type"]
|
|
||||||
pk = request.data["pk"]
|
|
||||||
|
|
||||||
# workstation policy is set
|
|
||||||
if (
|
|
||||||
"workstation_policy" in request.data
|
|
||||||
and request.data["workstation_policy"] != 0
|
|
||||||
):
|
|
||||||
policy = get_object_or_404(Policy, pk=request.data["workstation_policy"])
|
|
||||||
|
|
||||||
if related_type == "client":
|
|
||||||
client = get_object_or_404(Client, pk=pk)
|
|
||||||
|
|
||||||
# Check and see if workstation policy changed and regenerate policies
|
|
||||||
if (
|
|
||||||
not client.workstation_policy
|
|
||||||
or client.workstation_policy
|
|
||||||
and client.workstation_policy.pk != policy.pk
|
|
||||||
):
|
|
||||||
client.workstation_policy = policy
|
|
||||||
client.save()
|
|
||||||
|
|
||||||
generate_agent_checks_by_location_task.delay(
|
|
||||||
location={"site__client_id": client.id},
|
|
||||||
mon_type="workstation",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
if related_type == "site":
|
|
||||||
site = get_object_or_404(Site, pk=pk)
|
|
||||||
|
|
||||||
# Check and see if workstation policy changed and regenerate policies
|
|
||||||
if (
|
|
||||||
not site.workstation_policy
|
|
||||||
or site.workstation_policy
|
|
||||||
and site.workstation_policy.pk != policy.pk
|
|
||||||
):
|
|
||||||
site.workstation_policy = policy
|
|
||||||
site.save()
|
|
||||||
generate_agent_checks_by_location_task.delay(
|
|
||||||
location={"site_id": site.id},
|
|
||||||
mon_type="workstation",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
# server policy is set
|
|
||||||
if "server_policy" in request.data and request.data["server_policy"] != 0:
|
|
||||||
policy = get_object_or_404(Policy, pk=request.data["server_policy"])
|
|
||||||
|
|
||||||
if related_type == "client":
|
|
||||||
client = get_object_or_404(Client, pk=pk)
|
|
||||||
|
|
||||||
# Check and see if server policy changed and regenerate policies
|
|
||||||
if (
|
|
||||||
not client.server_policy
|
|
||||||
or client.server_policy
|
|
||||||
and client.server_policy.pk != policy.pk
|
|
||||||
):
|
|
||||||
client.server_policy = policy
|
|
||||||
client.save()
|
|
||||||
generate_agent_checks_by_location_task.delay(
|
|
||||||
location={"site__client_id": client.id},
|
|
||||||
mon_type="server",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
if related_type == "site":
|
|
||||||
site = get_object_or_404(Site, pk=pk)
|
|
||||||
|
|
||||||
# Check and see if server policy changed and regenerate policies
|
|
||||||
if (
|
|
||||||
not site.server_policy
|
|
||||||
or site.server_policy
|
|
||||||
and site.server_policy.pk != policy.pk
|
|
||||||
):
|
|
||||||
site.server_policy = policy
|
|
||||||
site.save()
|
|
||||||
generate_agent_checks_by_location_task.delay(
|
|
||||||
location={"site_id": site.id},
|
|
||||||
mon_type="server",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
# If workstation policy was cleared
|
|
||||||
if (
|
|
||||||
"workstation_policy" in request.data
|
|
||||||
and request.data["workstation_policy"] == 0
|
|
||||||
):
|
|
||||||
if related_type == "client":
|
|
||||||
client = get_object_or_404(Client, pk=pk)
|
|
||||||
|
|
||||||
# Check if workstation policy is set and update it to None
|
|
||||||
if client.workstation_policy:
|
|
||||||
|
|
||||||
client.workstation_policy = None
|
|
||||||
client.save()
|
|
||||||
generate_agent_checks_by_location_task.delay(
|
|
||||||
location={"site__client_id": client.id},
|
|
||||||
mon_type="workstation",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
if related_type == "site":
|
|
||||||
site = get_object_or_404(Site, pk=pk)
|
|
||||||
|
|
||||||
# Check if workstation policy is set and update it to None
|
|
||||||
if site.workstation_policy:
|
|
||||||
|
|
||||||
site.workstation_policy = None
|
|
||||||
site.save()
|
|
||||||
generate_agent_checks_by_location_task.delay(
|
|
||||||
location={"site_id": site.id},
|
|
||||||
mon_type="workstation",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
# server policy cleared
|
|
||||||
if "server_policy" in request.data and request.data["server_policy"] == 0:
|
|
||||||
|
|
||||||
if related_type == "client":
|
|
||||||
client = get_object_or_404(Client, pk=pk)
|
|
||||||
|
|
||||||
# Check if server policy is set and update it to None
|
|
||||||
if client.server_policy:
|
|
||||||
|
|
||||||
client.server_policy = None
|
|
||||||
client.save()
|
|
||||||
generate_agent_checks_by_location_task.delay(
|
|
||||||
location={"site__client_id": client.id},
|
|
||||||
mon_type="server",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
if related_type == "site":
|
|
||||||
site = get_object_or_404(Site, pk=pk)
|
|
||||||
# Check if server policy is set and update it to None
|
|
||||||
if site.server_policy:
|
|
||||||
|
|
||||||
site.server_policy = None
|
|
||||||
site.save()
|
|
||||||
generate_agent_checks_by_location_task.delay(
|
|
||||||
location={"site_id": site.pk},
|
|
||||||
mon_type="server",
|
|
||||||
clear=True,
|
|
||||||
create_tasks=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
# agent policies
|
|
||||||
if related_type == "agent":
|
|
||||||
agent = get_object_or_404(Agent, pk=pk)
|
|
||||||
|
|
||||||
if "policy" in request.data and request.data["policy"] != 0:
|
|
||||||
policy = Policy.objects.get(pk=request.data["policy"])
|
|
||||||
|
|
||||||
# Check and see if policy changed and regenerate policies
|
|
||||||
if not agent.policy or agent.policy and agent.policy.pk != policy.pk:
|
|
||||||
agent.policy = policy
|
|
||||||
agent.save()
|
|
||||||
agent.generate_checks_from_policies(clear=True)
|
|
||||||
agent.generate_tasks_from_policies(clear=True)
|
|
||||||
else:
|
|
||||||
if agent.policy:
|
|
||||||
agent.policy = None
|
|
||||||
agent.save()
|
|
||||||
agent.generate_checks_from_policies(clear=True)
|
|
||||||
agent.generate_tasks_from_policies(clear=True)
|
|
||||||
|
|
||||||
return Response("ok")
|
|
||||||
|
|
||||||
# view to get policies set on client, site, and workstation
|
|
||||||
def patch(self, request):
|
|
||||||
related_type = request.data["type"]
|
|
||||||
|
|
||||||
# client, site, or agent pk
|
|
||||||
pk = request.data["pk"]
|
|
||||||
|
|
||||||
if related_type == "agent":
|
|
||||||
agent = Agent.objects.get(pk=pk)
|
|
||||||
return Response(RelatedAgentPolicySerializer(agent).data)
|
|
||||||
|
|
||||||
if related_type == "site":
|
|
||||||
site = Site.objects.get(pk=pk)
|
|
||||||
return Response(RelatedSitePolicySerializer(site).data)
|
|
||||||
|
|
||||||
if related_type == "client":
|
|
||||||
client = Client.objects.get(pk=pk)
|
|
||||||
return Response(RelatedClientPolicySerializer(client).data)
|
|
||||||
|
|
||||||
content = {"error": "Data was submitted incorrectly"}
|
|
||||||
return Response(content, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
|
|
||||||
class UpdatePatchPolicy(APIView):
|
class UpdatePatchPolicy(APIView):
|
||||||
|
|
||||||
@@ -400,7 +171,7 @@ class UpdatePatchPolicy(APIView):
|
|||||||
|
|
||||||
serializer = WinUpdatePolicySerializer(data=request.data, partial=True)
|
serializer = WinUpdatePolicySerializer(data=request.data, partial=True)
|
||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
serializer.policy = policy
|
serializer.policy = policy # type: ignore
|
||||||
serializer.save()
|
serializer.save()
|
||||||
|
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
@@ -422,11 +193,15 @@ class UpdatePatchPolicy(APIView):
|
|||||||
|
|
||||||
agents = None
|
agents = None
|
||||||
if "client" in request.data:
|
if "client" in request.data:
|
||||||
agents = Agent.objects.filter(site__client_id=request.data["client"])
|
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
|
||||||
|
site__client_id=request.data["client"]
|
||||||
|
)
|
||||||
elif "site" in request.data:
|
elif "site" in request.data:
|
||||||
agents = Agent.objects.filter(site_id=request.data["site"])
|
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
|
||||||
|
site_id=request.data["site"]
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
agents = Agent.objects.all()
|
agents = Agent.objects.prefetch_related("winupdatepolicy").only("pk")
|
||||||
|
|
||||||
for agent in agents:
|
for agent in agents:
|
||||||
winupdatepolicy = agent.winupdatepolicy.get()
|
winupdatepolicy = agent.winupdatepolicy.get()
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
from django.core.management.base import BaseCommand
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
from agents.models import Agent
|
from agents.models import Agent
|
||||||
from autotasks.tasks import remove_orphaned_win_tasks
|
from autotasks.tasks import remove_orphaned_win_tasks
|
||||||
|
|
||||||
@@ -7,7 +8,7 @@ class Command(BaseCommand):
|
|||||||
help = "Checks for orphaned tasks on all agents and removes them"
|
help = "Checks for orphaned tasks on all agents and removes them"
|
||||||
|
|
||||||
def handle(self, *args, **kwargs):
|
def handle(self, *args, **kwargs):
|
||||||
agents = Agent.objects.all()
|
agents = Agent.objects.only("pk", "last_seen", "overdue_time", "offline_time")
|
||||||
online = [i for i in agents if i.status == "online"]
|
online = [i for i in agents if i.status == "online"]
|
||||||
for agent in online:
|
for agent in online:
|
||||||
remove_orphaned_win_tasks.delay(agent.pk)
|
remove_orphaned_win_tasks.delay(agent.pk)
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
# Generated by Django 3.0.6 on 2020-05-31 01:23
|
# Generated by Django 3.0.6 on 2020-05-31 01:23
|
||||||
|
|
||||||
import django.contrib.postgres.fields
|
import django.contrib.postgres.fields
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
from django.db import migrations
|
from django.db import migrations
|
||||||
|
|
||||||
from tacticalrmm.utils import get_bit_days
|
from tacticalrmm.utils import get_bit_days
|
||||||
|
|
||||||
DAYS_OF_WEEK = {
|
DAYS_OF_WEEK = {
|
||||||
|
|||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-27 22:21
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('autotasks', '0010_migrate_days_to_bitdays'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='alert_severity',
|
||||||
|
field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='None', max_length=30, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,33 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-28 04:17
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('autotasks', '0011_automatedtask_alert_severity'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='email_alert',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='email_sent',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='text_alert',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='text_sent',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-29 03:07
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('autotasks', '0012_auto_20210128_0417'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='alert_severity',
|
||||||
|
field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=30),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-29 21:11
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('autotasks', '0013_auto_20210129_0307'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='dashboard_alert',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-02-05 17:28
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('autotasks', '0014_automatedtask_dashboard_alert'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='resolved_email_sent',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='resolved_text_sent',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-02-05 21:17
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('autotasks', '0015_auto_20210205_1728'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='status',
|
||||||
|
field=models.CharField(choices=[('passing', 'Passing'), ('failing', 'Failing'), ('pending', 'Pending')], default='pending', max_length=30),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,29 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-02-10 15:12
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('autotasks', '0016_automatedtask_status'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='email_sent',
|
||||||
|
),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='resolved_email_sent',
|
||||||
|
),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='resolved_text_sent',
|
||||||
|
),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='text_sent',
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.7 on 2021-02-24 05:37
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('autotasks', '0017_auto_20210210_1512'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='automatedtask',
|
||||||
|
name='run_asap_after_missed',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -1,15 +1,21 @@
|
|||||||
import pytz
|
import datetime as dt
|
||||||
import random
|
import random
|
||||||
import string
|
import string
|
||||||
import datetime as dt
|
|
||||||
|
|
||||||
from django.db import models
|
import pytz
|
||||||
|
from django.conf import settings
|
||||||
from django.contrib.postgres.fields import ArrayField
|
from django.contrib.postgres.fields import ArrayField
|
||||||
|
from django.db import models
|
||||||
from django.db.models.fields import DateTimeField
|
from django.db.models.fields import DateTimeField
|
||||||
from automation.models import Policy
|
from django.utils import timezone as djangotime
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from alerts.models import SEVERITY_CHOICES
|
||||||
from logs.models import BaseAuditModel
|
from logs.models import BaseAuditModel
|
||||||
from tacticalrmm.utils import bitdays_to_string
|
from tacticalrmm.utils import bitdays_to_string
|
||||||
|
|
||||||
|
logger.configure(**settings.LOG_CONFIG)
|
||||||
|
|
||||||
RUN_TIME_DAY_CHOICES = [
|
RUN_TIME_DAY_CHOICES = [
|
||||||
(0, "Monday"),
|
(0, "Monday"),
|
||||||
(1, "Tuesday"),
|
(1, "Tuesday"),
|
||||||
@@ -33,6 +39,12 @@ SYNC_STATUS_CHOICES = [
|
|||||||
("pendingdeletion", "Pending Deletion on Agent"),
|
("pendingdeletion", "Pending Deletion on Agent"),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
TASK_STATUS_CHOICES = [
|
||||||
|
("passing", "Passing"),
|
||||||
|
("failing", "Failing"),
|
||||||
|
("pending", "Pending"),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class AutomatedTask(BaseAuditModel):
|
class AutomatedTask(BaseAuditModel):
|
||||||
agent = models.ForeignKey(
|
agent = models.ForeignKey(
|
||||||
@@ -43,7 +55,7 @@ class AutomatedTask(BaseAuditModel):
|
|||||||
blank=True,
|
blank=True,
|
||||||
)
|
)
|
||||||
policy = models.ForeignKey(
|
policy = models.ForeignKey(
|
||||||
Policy,
|
"automation.Policy",
|
||||||
related_name="autotasks",
|
related_name="autotasks",
|
||||||
null=True,
|
null=True,
|
||||||
blank=True,
|
blank=True,
|
||||||
@@ -84,6 +96,7 @@ class AutomatedTask(BaseAuditModel):
|
|||||||
)
|
)
|
||||||
run_time_date = DateTimeField(null=True, blank=True)
|
run_time_date = DateTimeField(null=True, blank=True)
|
||||||
remove_if_not_scheduled = models.BooleanField(default=False)
|
remove_if_not_scheduled = models.BooleanField(default=False)
|
||||||
|
run_asap_after_missed = models.BooleanField(default=False) # added in agent v1.4.7
|
||||||
managed_by_policy = models.BooleanField(default=False)
|
managed_by_policy = models.BooleanField(default=False)
|
||||||
parent_task = models.PositiveIntegerField(null=True, blank=True)
|
parent_task = models.PositiveIntegerField(null=True, blank=True)
|
||||||
win_task_name = models.CharField(max_length=255, null=True, blank=True)
|
win_task_name = models.CharField(max_length=255, null=True, blank=True)
|
||||||
@@ -94,9 +107,18 @@ class AutomatedTask(BaseAuditModel):
|
|||||||
execution_time = models.CharField(max_length=100, default="0.0000")
|
execution_time = models.CharField(max_length=100, default="0.0000")
|
||||||
last_run = models.DateTimeField(null=True, blank=True)
|
last_run = models.DateTimeField(null=True, blank=True)
|
||||||
enabled = models.BooleanField(default=True)
|
enabled = models.BooleanField(default=True)
|
||||||
|
status = models.CharField(
|
||||||
|
max_length=30, choices=TASK_STATUS_CHOICES, default="pending"
|
||||||
|
)
|
||||||
sync_status = models.CharField(
|
sync_status = models.CharField(
|
||||||
max_length=100, choices=SYNC_STATUS_CHOICES, default="notsynced"
|
max_length=100, choices=SYNC_STATUS_CHOICES, default="notsynced"
|
||||||
)
|
)
|
||||||
|
alert_severity = models.CharField(
|
||||||
|
max_length=30, choices=SEVERITY_CHOICES, default="info"
|
||||||
|
)
|
||||||
|
email_alert = models.BooleanField(default=False)
|
||||||
|
text_alert = models.BooleanField(default=False)
|
||||||
|
dashboard_alert = models.BooleanField(default=False)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.name
|
return self.name
|
||||||
@@ -141,22 +163,49 @@ class AutomatedTask(BaseAuditModel):
|
|||||||
def create_policy_task(self, agent=None, policy=None):
|
def create_policy_task(self, agent=None, policy=None):
|
||||||
from .tasks import create_win_task_schedule
|
from .tasks import create_win_task_schedule
|
||||||
|
|
||||||
|
# if policy is present, then this task is being copied to another policy
|
||||||
|
# if agent is present, then this task is being created on an agent from a policy
|
||||||
# exit if neither are set or if both are set
|
# exit if neither are set or if both are set
|
||||||
if not agent and not policy or agent and policy:
|
if not agent and not policy or agent and policy:
|
||||||
return
|
return
|
||||||
|
|
||||||
assigned_check = None
|
assigned_check = None
|
||||||
|
|
||||||
|
# get correct assigned check to task if set
|
||||||
if agent and self.assigned_check:
|
if agent and self.assigned_check:
|
||||||
assigned_check = agent.agentchecks.get(parent_check=self.assigned_check.pk)
|
# check if there is a matching check on the agent
|
||||||
|
if agent.agentchecks.filter(parent_check=self.assigned_check.pk).exists():
|
||||||
|
assigned_check = agent.agentchecks.filter(
|
||||||
|
parent_check=self.assigned_check.pk
|
||||||
|
).first()
|
||||||
|
# check was overriden by agent and we need to use that agents check
|
||||||
|
else:
|
||||||
|
if agent.agentchecks.filter(
|
||||||
|
check_type=self.assigned_check.check_type, overriden_by_policy=True
|
||||||
|
).exists():
|
||||||
|
assigned_check = agent.agentchecks.filter(
|
||||||
|
check_type=self.assigned_check.check_type,
|
||||||
|
overriden_by_policy=True,
|
||||||
|
).first()
|
||||||
elif policy and self.assigned_check:
|
elif policy and self.assigned_check:
|
||||||
assigned_check = policy.policychecks.get(name=self.assigned_check.name)
|
if policy.policychecks.filter(name=self.assigned_check.name).exists():
|
||||||
|
assigned_check = policy.policychecks.filter(
|
||||||
|
name=self.assigned_check.name
|
||||||
|
).first()
|
||||||
|
else:
|
||||||
|
assigned_check = policy.policychecks.filter(
|
||||||
|
check_type=self.assigned_check.check_type
|
||||||
|
).first()
|
||||||
|
|
||||||
task = AutomatedTask.objects.create(
|
task = AutomatedTask.objects.create(
|
||||||
agent=agent,
|
agent=agent,
|
||||||
policy=policy,
|
policy=policy,
|
||||||
managed_by_policy=bool(agent),
|
managed_by_policy=bool(agent),
|
||||||
parent_task=(self.pk if agent else None),
|
parent_task=(self.pk if agent else None),
|
||||||
|
alert_severity=self.alert_severity,
|
||||||
|
email_alert=self.email_alert,
|
||||||
|
text_alert=self.text_alert,
|
||||||
|
dashboard_alert=self.dashboard_alert,
|
||||||
script=self.script,
|
script=self.script,
|
||||||
script_args=self.script_args,
|
script_args=self.script_args,
|
||||||
assigned_check=assigned_check,
|
assigned_check=assigned_check,
|
||||||
@@ -170,6 +219,85 @@ class AutomatedTask(BaseAuditModel):
|
|||||||
timeout=self.timeout,
|
timeout=self.timeout,
|
||||||
enabled=self.enabled,
|
enabled=self.enabled,
|
||||||
remove_if_not_scheduled=self.remove_if_not_scheduled,
|
remove_if_not_scheduled=self.remove_if_not_scheduled,
|
||||||
|
run_asap_after_missed=self.run_asap_after_missed,
|
||||||
)
|
)
|
||||||
|
|
||||||
create_win_task_schedule.delay(task.pk)
|
create_win_task_schedule.delay(task.pk)
|
||||||
|
|
||||||
|
def should_create_alert(self, alert_template):
|
||||||
|
return (
|
||||||
|
self.dashboard_alert
|
||||||
|
or self.email_alert
|
||||||
|
or self.text_alert
|
||||||
|
or (
|
||||||
|
alert_template
|
||||||
|
and (
|
||||||
|
alert_template.task_always_alert
|
||||||
|
or alert_template.task_always_email
|
||||||
|
or alert_template.task_always_text
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
def send_email(self):
|
||||||
|
from core.models import CoreSettings
|
||||||
|
|
||||||
|
CORE = CoreSettings.objects.first()
|
||||||
|
alert_template = self.agent.get_alert_template()
|
||||||
|
|
||||||
|
if self.agent:
|
||||||
|
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
|
||||||
|
else:
|
||||||
|
subject = f"{self} Failed"
|
||||||
|
|
||||||
|
body = (
|
||||||
|
subject
|
||||||
|
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||||
|
)
|
||||||
|
|
||||||
|
CORE.send_mail(subject, body, alert_template)
|
||||||
|
|
||||||
|
def send_sms(self):
|
||||||
|
|
||||||
|
from core.models import CoreSettings
|
||||||
|
|
||||||
|
CORE = CoreSettings.objects.first()
|
||||||
|
alert_template = self.agent.get_alert_template()
|
||||||
|
|
||||||
|
if self.agent:
|
||||||
|
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
|
||||||
|
else:
|
||||||
|
subject = f"{self} Failed"
|
||||||
|
|
||||||
|
body = (
|
||||||
|
subject
|
||||||
|
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||||
|
)
|
||||||
|
|
||||||
|
CORE.send_sms(body, alert_template=alert_template)
|
||||||
|
|
||||||
|
def send_resolved_email(self):
|
||||||
|
from core.models import CoreSettings
|
||||||
|
|
||||||
|
alert_template = self.agent.get_alert_template()
|
||||||
|
|
||||||
|
CORE = CoreSettings.objects.first()
|
||||||
|
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
|
||||||
|
body = (
|
||||||
|
subject
|
||||||
|
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||||
|
)
|
||||||
|
|
||||||
|
CORE.send_mail(subject, body, alert_template=alert_template)
|
||||||
|
|
||||||
|
def send_resolved_sms(self):
|
||||||
|
from core.models import CoreSettings
|
||||||
|
|
||||||
|
alert_template = self.agent.get_alert_template()
|
||||||
|
CORE = CoreSettings.objects.first()
|
||||||
|
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
|
||||||
|
body = (
|
||||||
|
subject
|
||||||
|
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||||
|
)
|
||||||
|
CORE.send_sms(body, alert_template=alert_template)
|
||||||
|
|||||||
@@ -1,12 +1,11 @@
|
|||||||
import pytz
|
|
||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
from .models import AutomatedTask
|
|
||||||
from agents.models import Agent
|
from agents.models import Agent
|
||||||
from scripts.models import Script
|
|
||||||
|
|
||||||
from scripts.serializers import ScriptCheckSerializer
|
|
||||||
from checks.serializers import CheckSerializer
|
from checks.serializers import CheckSerializer
|
||||||
|
from scripts.models import Script
|
||||||
|
from scripts.serializers import ScriptCheckSerializer
|
||||||
|
|
||||||
|
from .models import AutomatedTask
|
||||||
|
|
||||||
|
|
||||||
class TaskSerializer(serializers.ModelSerializer):
|
class TaskSerializer(serializers.ModelSerializer):
|
||||||
@@ -14,6 +13,24 @@ class TaskSerializer(serializers.ModelSerializer):
|
|||||||
assigned_check = CheckSerializer(read_only=True)
|
assigned_check = CheckSerializer(read_only=True)
|
||||||
schedule = serializers.ReadOnlyField()
|
schedule = serializers.ReadOnlyField()
|
||||||
last_run = serializers.ReadOnlyField(source="last_run_as_timezone")
|
last_run = serializers.ReadOnlyField(source="last_run_as_timezone")
|
||||||
|
alert_template = serializers.SerializerMethodField()
|
||||||
|
|
||||||
|
def get_alert_template(self, obj):
|
||||||
|
|
||||||
|
if obj.agent:
|
||||||
|
alert_template = obj.agent.get_alert_template()
|
||||||
|
else:
|
||||||
|
alert_template = None
|
||||||
|
|
||||||
|
if not alert_template:
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
return {
|
||||||
|
"name": alert_template.name,
|
||||||
|
"always_email": alert_template.task_always_email,
|
||||||
|
"always_text": alert_template.task_always_text,
|
||||||
|
"always_alert": alert_template.task_always_alert,
|
||||||
|
}
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = AutomatedTask
|
model = AutomatedTask
|
||||||
|
|||||||
@@ -1,14 +1,19 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import datetime as dt
|
import datetime as dt
|
||||||
from loguru import logger
|
import random
|
||||||
from tacticalrmm.celery import app
|
from time import sleep
|
||||||
from django.conf import settings
|
from typing import Union
|
||||||
|
|
||||||
import pytz
|
import pytz
|
||||||
|
from django.conf import settings
|
||||||
from django.utils import timezone as djangotime
|
from django.utils import timezone as djangotime
|
||||||
|
from loguru import logger
|
||||||
from packaging import version as pyver
|
from packaging import version as pyver
|
||||||
|
|
||||||
from .models import AutomatedTask
|
|
||||||
from logs.models import PendingAction
|
from logs.models import PendingAction
|
||||||
|
from tacticalrmm.celery import app
|
||||||
|
|
||||||
|
from .models import AutomatedTask
|
||||||
|
|
||||||
logger.configure(**settings.LOG_CONFIG)
|
logger.configure(**settings.LOG_CONFIG)
|
||||||
|
|
||||||
@@ -40,7 +45,7 @@ def create_win_task_schedule(pk, pending_action=False):
|
|||||||
task.run_time_date = now.astimezone(agent_tz).replace(
|
task.run_time_date = now.astimezone(agent_tz).replace(
|
||||||
tzinfo=pytz.utc
|
tzinfo=pytz.utc
|
||||||
) + djangotime.timedelta(minutes=5)
|
) + djangotime.timedelta(minutes=5)
|
||||||
task.save()
|
task.save(update_fields=["run_time_date"])
|
||||||
|
|
||||||
nats_data = {
|
nats_data = {
|
||||||
"func": "schedtask",
|
"func": "schedtask",
|
||||||
@@ -57,9 +62,12 @@ def create_win_task_schedule(pk, pending_action=False):
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
if task.remove_if_not_scheduled and pyver.parse(
|
if task.run_asap_after_missed and pyver.parse(
|
||||||
task.agent.version
|
task.agent.version
|
||||||
) >= pyver.parse("1.1.2"):
|
) >= pyver.parse("1.4.7"):
|
||||||
|
nats_data["schedtaskpayload"]["run_asap_after_missed"] = True
|
||||||
|
|
||||||
|
if task.remove_if_not_scheduled:
|
||||||
nats_data["schedtaskpayload"]["deleteafter"] = True
|
nats_data["schedtaskpayload"]["deleteafter"] = True
|
||||||
|
|
||||||
elif task.task_type == "checkfailure" or task.task_type == "manual":
|
elif task.task_type == "checkfailure" or task.task_type == "manual":
|
||||||
@@ -76,9 +84,14 @@ def create_win_task_schedule(pk, pending_action=False):
|
|||||||
return "error"
|
return "error"
|
||||||
|
|
||||||
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
|
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
|
||||||
|
|
||||||
if r != "ok":
|
if r != "ok":
|
||||||
# don't create pending action if this task was initiated by a pending action
|
# don't create pending action if this task was initiated by a pending action
|
||||||
if not pending_action:
|
if not pending_action:
|
||||||
|
|
||||||
|
# complete any other pending actions on agent with same task_id
|
||||||
|
task.agent.remove_matching_pending_task_actions(task.id)
|
||||||
|
|
||||||
PendingAction(
|
PendingAction(
|
||||||
agent=task.agent,
|
agent=task.agent,
|
||||||
action_type="taskaction",
|
action_type="taskaction",
|
||||||
@@ -144,6 +157,7 @@ def enable_or_disable_win_task(pk, action, pending_action=False):
|
|||||||
|
|
||||||
task.sync_status = "synced"
|
task.sync_status = "synced"
|
||||||
task.save(update_fields=["sync_status"])
|
task.save(update_fields=["sync_status"])
|
||||||
|
|
||||||
return "ok"
|
return "ok"
|
||||||
|
|
||||||
|
|
||||||
@@ -157,9 +171,13 @@ def delete_win_task_schedule(pk, pending_action=False):
|
|||||||
}
|
}
|
||||||
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
|
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
|
||||||
|
|
||||||
if r != "ok":
|
if r != "ok" and "The system cannot find the file specified" not in r:
|
||||||
# don't create pending action if this task was initiated by a pending action
|
# don't create pending action if this task was initiated by a pending action
|
||||||
if not pending_action:
|
if not pending_action:
|
||||||
|
|
||||||
|
# complete any other pending actions on agent with same task_id
|
||||||
|
task.agent.remove_matching_pending_task_actions(task.id)
|
||||||
|
|
||||||
PendingAction(
|
PendingAction(
|
||||||
agent=task.agent,
|
agent=task.agent,
|
||||||
action_type="taskaction",
|
action_type="taskaction",
|
||||||
@@ -168,7 +186,7 @@ def delete_win_task_schedule(pk, pending_action=False):
|
|||||||
task.sync_status = "pendingdeletion"
|
task.sync_status = "pendingdeletion"
|
||||||
task.save(update_fields=["sync_status"])
|
task.save(update_fields=["sync_status"])
|
||||||
|
|
||||||
return
|
return "timeout"
|
||||||
|
|
||||||
# complete pending action since it was successful
|
# complete pending action since it was successful
|
||||||
if pending_action:
|
if pending_action:
|
||||||
@@ -177,10 +195,7 @@ def delete_win_task_schedule(pk, pending_action=False):
|
|||||||
pendingaction.save(update_fields=["status"])
|
pendingaction.save(update_fields=["status"])
|
||||||
|
|
||||||
# complete any other pending actions on agent with same task_id
|
# complete any other pending actions on agent with same task_id
|
||||||
for action in task.agent.pendingactions.all():
|
task.agent.remove_matching_pending_task_actions(task.id)
|
||||||
if action.details["task_id"] == task.id:
|
|
||||||
action.status = "completed"
|
|
||||||
action.save()
|
|
||||||
|
|
||||||
task.delete()
|
task.delete()
|
||||||
return "ok"
|
return "ok"
|
||||||
@@ -236,3 +251,85 @@ def remove_orphaned_win_tasks(agentpk):
|
|||||||
logger.info(f"Removed orphaned task {task} from {agent.hostname}")
|
logger.info(f"Removed orphaned task {task} from {agent.hostname}")
|
||||||
|
|
||||||
logger.info(f"Orphaned task cleanup finished on {agent.hostname}")
|
logger.info(f"Orphaned task cleanup finished on {agent.hostname}")
|
||||||
|
|
||||||
|
|
||||||
|
@app.task
|
||||||
|
def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None) -> str:
|
||||||
|
from alerts.models import Alert
|
||||||
|
|
||||||
|
alert = Alert.objects.get(pk=pk)
|
||||||
|
|
||||||
|
# first time sending email
|
||||||
|
if not alert.email_sent:
|
||||||
|
sleep(random.randint(1, 10))
|
||||||
|
alert.assigned_task.send_email()
|
||||||
|
alert.email_sent = djangotime.now()
|
||||||
|
alert.save(update_fields=["email_sent"])
|
||||||
|
else:
|
||||||
|
if alert_interval:
|
||||||
|
# send an email only if the last email sent is older than alert interval
|
||||||
|
delta = djangotime.now() - dt.timedelta(days=alert_interval)
|
||||||
|
if alert.email_sent < delta:
|
||||||
|
sleep(random.randint(1, 10))
|
||||||
|
alert.assigned_task.send_email()
|
||||||
|
alert.email_sent = djangotime.now()
|
||||||
|
alert.save(update_fields=["email_sent"])
|
||||||
|
|
||||||
|
return "ok"
|
||||||
|
|
||||||
|
|
||||||
|
@app.task
|
||||||
|
def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) -> str:
|
||||||
|
from alerts.models import Alert
|
||||||
|
|
||||||
|
alert = Alert.objects.get(pk=pk)
|
||||||
|
|
||||||
|
# first time sending text
|
||||||
|
if not alert.sms_sent:
|
||||||
|
sleep(random.randint(1, 3))
|
||||||
|
alert.assigned_task.send_sms()
|
||||||
|
alert.sms_sent = djangotime.now()
|
||||||
|
alert.save(update_fields=["sms_sent"])
|
||||||
|
else:
|
||||||
|
if alert_interval:
|
||||||
|
# send a text only if the last text sent is older than alert interval
|
||||||
|
delta = djangotime.now() - dt.timedelta(days=alert_interval)
|
||||||
|
if alert.sms_sent < delta:
|
||||||
|
sleep(random.randint(1, 3))
|
||||||
|
alert.assigned_task.send_sms()
|
||||||
|
alert.sms_sent = djangotime.now()
|
||||||
|
alert.save(update_fields=["sms_sent"])
|
||||||
|
|
||||||
|
return "ok"
|
||||||
|
|
||||||
|
|
||||||
|
@app.task
|
||||||
|
def handle_resolved_task_sms_alert(pk: int) -> str:
|
||||||
|
from alerts.models import Alert
|
||||||
|
|
||||||
|
alert = Alert.objects.get(pk=pk)
|
||||||
|
|
||||||
|
# first time sending text
|
||||||
|
if not alert.resolved_sms_sent:
|
||||||
|
sleep(random.randint(1, 3))
|
||||||
|
alert.assigned_task.send_resolved_sms()
|
||||||
|
alert.resolved_sms_sent = djangotime.now()
|
||||||
|
alert.save(update_fields=["resolved_sms_sent"])
|
||||||
|
|
||||||
|
return "ok"
|
||||||
|
|
||||||
|
|
||||||
|
@app.task
|
||||||
|
def handle_resolved_task_email_alert(pk: int) -> str:
|
||||||
|
from alerts.models import Alert
|
||||||
|
|
||||||
|
alert = Alert.objects.get(pk=pk)
|
||||||
|
|
||||||
|
# first time sending email
|
||||||
|
if not alert.resolved_email_sent:
|
||||||
|
sleep(random.randint(1, 10))
|
||||||
|
alert.assigned_task.send_resolved_email()
|
||||||
|
alert.resolved_email_sent = djangotime.now()
|
||||||
|
alert.save(update_fields=["resolved_email_sent"])
|
||||||
|
|
||||||
|
return "ok"
|
||||||
|
|||||||
@@ -1,14 +1,15 @@
|
|||||||
import datetime as dt
|
import datetime as dt
|
||||||
from unittest.mock import patch, call
|
from unittest.mock import call, patch
|
||||||
from model_bakery import baker
|
|
||||||
from django.utils import timezone as djangotime
|
|
||||||
|
|
||||||
|
from django.utils import timezone as djangotime
|
||||||
|
from model_bakery import baker
|
||||||
|
|
||||||
|
from logs.models import PendingAction
|
||||||
from tacticalrmm.test import TacticalTestCase
|
from tacticalrmm.test import TacticalTestCase
|
||||||
|
|
||||||
from .models import AutomatedTask
|
from .models import AutomatedTask
|
||||||
from logs.models import PendingAction
|
|
||||||
from .serializers import AutoTaskSerializer
|
from .serializers import AutoTaskSerializer
|
||||||
from .tasks import remove_orphaned_win_tasks, run_win_task, create_win_task_schedule
|
from .tasks import create_win_task_schedule, remove_orphaned_win_tasks, run_win_task
|
||||||
|
|
||||||
|
|
||||||
class TestAutotaskViews(TacticalTestCase):
|
class TestAutotaskViews(TacticalTestCase):
|
||||||
@@ -150,7 +151,9 @@ class TestAutotaskViews(TacticalTestCase):
|
|||||||
|
|
||||||
resp = self.client.patch(url, data, format="json")
|
resp = self.client.patch(url, data, format="json")
|
||||||
self.assertEqual(resp.status_code, 200)
|
self.assertEqual(resp.status_code, 200)
|
||||||
update_policy_task_fields_task.assert_called_with(policy_task.id, True)
|
update_policy_task_fields_task.assert_called_with(
|
||||||
|
policy_task.id, update_agent=True
|
||||||
|
)
|
||||||
|
|
||||||
self.check_not_authenticated("patch", url)
|
self.check_not_authenticated("patch", url)
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from . import views
|
from . import views
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
|
|||||||
@@ -1,32 +1,28 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import pytz
|
|
||||||
from django.shortcuts import get_object_or_404
|
from django.shortcuts import get_object_or_404
|
||||||
|
|
||||||
from rest_framework.views import APIView
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework.decorators import api_view
|
from rest_framework.decorators import api_view
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
from .models import AutomatedTask
|
|
||||||
from agents.models import Agent
|
from agents.models import Agent
|
||||||
from checks.models import Check
|
from checks.models import Check
|
||||||
|
|
||||||
from scripts.models import Script
|
from scripts.models import Script
|
||||||
from core.models import CoreSettings
|
from tacticalrmm.utils import get_bit_days, get_default_timezone, notify_error
|
||||||
|
|
||||||
from .serializers import TaskSerializer, AutoTaskSerializer
|
|
||||||
|
|
||||||
|
from .models import AutomatedTask
|
||||||
|
from .serializers import AutoTaskSerializer, TaskSerializer
|
||||||
from .tasks import (
|
from .tasks import (
|
||||||
create_win_task_schedule,
|
create_win_task_schedule,
|
||||||
delete_win_task_schedule,
|
delete_win_task_schedule,
|
||||||
enable_or_disable_win_task,
|
enable_or_disable_win_task,
|
||||||
)
|
)
|
||||||
from tacticalrmm.utils import notify_error, get_bit_days
|
|
||||||
|
|
||||||
|
|
||||||
class AddAutoTask(APIView):
|
class AddAutoTask(APIView):
|
||||||
def post(self, request):
|
def post(self, request):
|
||||||
from automation.tasks import generate_agent_tasks_from_policies_task
|
|
||||||
from automation.models import Policy
|
from automation.models import Policy
|
||||||
|
from automation.tasks import generate_agent_tasks_from_policies_task
|
||||||
|
|
||||||
data = request.data
|
data = request.data
|
||||||
script = get_object_or_404(Script, pk=data["autotask"]["script"])
|
script = get_object_or_404(Script, pk=data["autotask"]["script"])
|
||||||
@@ -76,11 +72,25 @@ class AutoTask(APIView):
|
|||||||
|
|
||||||
agent = get_object_or_404(Agent, pk=pk)
|
agent = get_object_or_404(Agent, pk=pk)
|
||||||
ctx = {
|
ctx = {
|
||||||
"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone),
|
"default_tz": get_default_timezone(),
|
||||||
"agent_tz": agent.time_zone,
|
"agent_tz": agent.time_zone,
|
||||||
}
|
}
|
||||||
return Response(AutoTaskSerializer(agent, context=ctx).data)
|
return Response(AutoTaskSerializer(agent, context=ctx).data)
|
||||||
|
|
||||||
|
def put(self, request, pk):
|
||||||
|
from automation.tasks import update_policy_task_fields_task
|
||||||
|
|
||||||
|
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||||
|
|
||||||
|
serializer = TaskSerializer(instance=task, data=request.data, partial=True)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save()
|
||||||
|
|
||||||
|
if task.policy:
|
||||||
|
update_policy_task_fields_task.delay(task.pk)
|
||||||
|
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
def patch(self, request, pk):
|
def patch(self, request, pk):
|
||||||
from automation.tasks import update_policy_task_fields_task
|
from automation.tasks import update_policy_task_fields_task
|
||||||
|
|
||||||
@@ -93,7 +103,7 @@ class AutoTask(APIView):
|
|||||||
enable_or_disable_win_task.delay(pk=task.pk, action=action)
|
enable_or_disable_win_task.delay(pk=task.pk, action=action)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
update_policy_task_fields_task.delay(task.pk, action)
|
update_policy_task_fields_task.delay(task.pk, update_agent=True)
|
||||||
|
|
||||||
task.enabled = action
|
task.enabled = action
|
||||||
task.save(update_fields=["enabled"])
|
task.save(update_fields=["enabled"])
|
||||||
|
|||||||
@@ -1,21 +1,27 @@
|
|||||||
from .models import Check
|
from model_bakery.recipe import Recipe
|
||||||
from model_bakery.recipe import Recipe, seq
|
|
||||||
|
|
||||||
check = Recipe(Check)
|
check = Recipe("checks.Check")
|
||||||
|
|
||||||
diskspace_check = check.extend(check_type="diskspace", disk="C:", threshold=75)
|
diskspace_check = check.extend(
|
||||||
|
check_type="diskspace", disk="C:", warning_threshold=30, error_threshold=10
|
||||||
|
)
|
||||||
|
|
||||||
cpuload_check = check.extend(check_type="cpuload", threshold=75)
|
cpuload_check = check.extend(
|
||||||
|
check_type="cpuload", warning_threshold=30, error_threshold=75
|
||||||
|
)
|
||||||
|
|
||||||
ping_check = check.extend(check_type="ping", ip="10.10.10.10")
|
ping_check = check.extend(check_type="ping", ip="10.10.10.10")
|
||||||
|
|
||||||
memory_check = check.extend(check_type="memory", threshold=75)
|
memory_check = check.extend(
|
||||||
|
check_type="memory", warning_threshold=60, error_threshold=75
|
||||||
|
)
|
||||||
|
|
||||||
winsvc_check = check.extend(
|
winsvc_check = check.extend(
|
||||||
check_type="winsvc",
|
check_type="winsvc",
|
||||||
svc_name="ServiceName",
|
svc_name="ServiceName",
|
||||||
svc_display_name="ServiceName",
|
svc_display_name="ServiceName",
|
||||||
svc_policy_mode="manual",
|
svc_policy_mode="manual",
|
||||||
|
pass_if_svc_not_exist=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
eventlog_check = check.extend(
|
eventlog_check = check.extend(
|
||||||
|
|||||||
@@ -3,8 +3,8 @@
|
|||||||
import django.contrib.postgres.fields
|
import django.contrib.postgres.fields
|
||||||
import django.contrib.postgres.fields.jsonb
|
import django.contrib.postgres.fields.jsonb
|
||||||
import django.core.validators
|
import django.core.validators
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# Generated by Django 3.1.4 on 2021-01-09 21:36
|
# Generated by Django 3.1.4 on 2021-01-09 21:36
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|||||||
43
api/tacticalrmm/checks/migrations/0016_auto_20210123_0149.py
Normal file
43
api/tacticalrmm/checks/migrations/0016_auto_20210123_0149.py
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-23 01:49
|
||||||
|
|
||||||
|
import django.contrib.postgres.fields
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('checks', '0015_auto_20210110_1808'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name='check',
|
||||||
|
name='threshold',
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='check',
|
||||||
|
name='alert_severity',
|
||||||
|
field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='warning', max_length=15),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='check',
|
||||||
|
name='error_threshold',
|
||||||
|
field=models.PositiveIntegerField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='check',
|
||||||
|
name='info_return_codes',
|
||||||
|
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveIntegerField(), blank=True, default=list, null=True, size=None),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='check',
|
||||||
|
name='warning_return_codes',
|
||||||
|
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveIntegerField(), blank=True, default=list, null=True, size=None),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='check',
|
||||||
|
name='warning_threshold',
|
||||||
|
field=models.PositiveIntegerField(blank=True, default=0, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-29 21:11
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('checks', '0016_auto_20210123_0149'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='check',
|
||||||
|
name='dashboard_alert',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
]
|
||||||
18
api/tacticalrmm/checks/migrations/0018_auto_20210205_1647.py
Normal file
18
api/tacticalrmm/checks/migrations/0018_auto_20210205_1647.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-02-05 16:47
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('checks', '0017_check_dashboard_alert'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='check',
|
||||||
|
name='alert_severity',
|
||||||
|
field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='warning', max_length=15, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
23
api/tacticalrmm/checks/migrations/0019_auto_20210205_1728.py
Normal file
23
api/tacticalrmm/checks/migrations/0019_auto_20210205_1728.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-02-05 17:28
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('checks', '0018_auto_20210205_1647'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='check',
|
||||||
|
name='resolved_email_sent',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='check',
|
||||||
|
name='resolved_text_sent',
|
||||||
|
field=models.DateTimeField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
29
api/tacticalrmm/checks/migrations/0020_auto_20210210_1512.py
Normal file
29
api/tacticalrmm/checks/migrations/0020_auto_20210210_1512.py
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-02-10 15:12
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('checks', '0019_auto_20210205_1728'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name='check',
|
||||||
|
name='email_sent',
|
||||||
|
),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name='check',
|
||||||
|
name='resolved_email_sent',
|
||||||
|
),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name='check',
|
||||||
|
name='resolved_text_sent',
|
||||||
|
),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name='check',
|
||||||
|
name='text_sent',
|
||||||
|
),
|
||||||
|
]
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user