Compare commits
322 Commits
SHA1:
8eb91c08aa, ded5437522, 9348657951, bca85933f7, 4b84062d62, d6d0f8fa17, dd72c875d3, 1a1df50300, 53cbb527b4, 8b87b2717e,
1007d6dac7, 6799fac120, 558e6288ca, d9cb73291b, d0f7be3ac3, 331e16d3ca, 0db246c311, 94dc62ff58, e68ecf6844, 5167b0a8c6,
77e3d3786d, 708d4d39bc, 2a8cda2a1e, 8d783840ad, abe39d5790, d7868e9e5a, 7b84e36e15, 6cab6d69d8, 87846d7aef, 2557769c6a,
48375f3878, 176c85d8c1, 17cad71ede, e8bf9d4e6f, 7bdd2038ef, e9f6e7943a, e74ba387ab, 27c79e5b99, 8170d5ea73, 196f73705d,
ad0bbf5248, 4cae9cd90d, be7bc55a76, 684b545e8f, 7835cc3b10, f8706b51e8, d97f8fd5da, f8fa87441e, d42537814a, 792421b0e2,
72d55a010b, 880d8258ce, b79bf82efb, b3118b6253, ba172e2e25, 892d53abeb, 5cbaa1ce98, 7b35d9ad2e, 8462de7911, 8721f44298,
c7a2d69afa, 0453d81e7a, 501c04ac2b, 0ef4e9a5c3, 129c50e598, 3e276fc2ac, 658d5e05ae, 4e7d5d476e, 6a55ca20f3, c56c537f7f,
fd7d776121, 1af28190d8, 6b305be567, 3bf70513b7, 7e64404654, e1b5226f34, 0d7128ad31, 5778626087, 3ff48756ed, 0ce9a6eeba,
ad527b4aed, 6633bb452e, efeb0b4feb, 8cc11fc102, ee6a167220, 8d4ad3c405, 072fbf4d60, 727c41c283, e2266838b6, 775762d615,
900c3008cb, 09379213a6, ceb97048e3, 4561515517, a7b285759f, b4531b2a12, 9e1d261c76, e35fa15cd2, dbd1f0d4f9, 9ade78b703,
f20e244b5f, 0989308b7e, 12c7140536, 2a0b605e92, 6978890e6a, 561abd6cb9, 4dd6227f0b, 1ec314c31c, a2be5a00be, 4e2241c115,
8459bca64a, 24cb0565b9, 9442acb028, 4f7f181a42, b7dd8737a7, 2207eeb727, 89dad7dfe7, e5803d0cf3, c1fffe9ae6, 9e6cbd3d32,
2ea8742510, 5cfa0254f9, 8cd2544f78, c03b768364, d60481ead4, 126be3827d, 121274dca2, 0ecf8da27e, 4a6bcb525d, 83f9ee50dd,
2bff297f79, dee68f6933, afa1e19c83, 6052088eb4, c7fa5167c4, 1034b0b146, 8bcc4e5945, c3c24aa1db, 281c75d2d2, 52307420f3,
6185347cd8, b6cd29f77e, b8ea8b1567, 2f7dc98830, e248a99f79, 4fb6d9aa5d, f092ea8d67, c32cbbdda6, 2497675259, 8d084ab90a,
2398773ef0, a05998a30e, f863c29194, d16a98c788, 9421b02e96, 10256864e4, 85d010615d, cd1cb186be, 4458354d70, 0f27da8808,
dd76bfa3c2, 5780a66f7d, d4342c034c, 1ec43f2530, 3c300d8fdf, 23119b55d1, c8fb0e8f8a, 0ec32a77ef, 52921bfce8, 960b929097,
d4ce23eced, 6925510f44, 9827ad4c22, ef8aaee028, 3d7d39f248, 3eac620560, ab17006956, bfc6889ee9, 0ec0b4a044, f1a523f327,
4181449aea, e192f8db52, 8097c681ac, f45938bdd5, 6ea4e97eca, f274c8e837, 335e571485, a11616aace, 883acadbc4, f51e6a3fcf,
371e081c0d, 6f41b3bf1c, c1d74a6c9e, 24eaa6796e, 1521e3b620, b6ff38dd62, 44ea9ac03c, 4c2701505b, 9022fe18da, 63be349f8b,
c40256a290, 33ecb8ec52, 82d62a0015, 6278240526, 8c2dc5f57d, 2e5868778a, a10b8dab9b, 92f4f7ef59, 31257bd5cb, bb6510862f,
797ecf0780, f9536dc67f, e8b95362af, bdc39ad4ec, 4a202c5585, 3c6b321f73, cb29b52799, 7e48015a54, 9ed3abf932, 61762828a3,
59beabe5ac, 0b30faa28c, d12d49b93f, f1d64d275a, d094eeeb03, be25af658e, 794f52c229, 5d4dc4ed4c, e49d97b898, b6b4f1ba62,
653d476716, 48b855258c, c7efdaf5f9, 22523ed3d3, 33c602dd61, e2a5509b76, 61a0fa1a89, a35bd8292b, 06c8ae60e3, deeab1f845,
da81c4c987, d180f1b2d5, 526135629c, 6b9493e057, 9bb33d2afc, 7421138533, d0800c52bb, 913fcd4df2, 83322cc725, 5944501feb,
17e3603d3d, 95be43ae47, feb91cbbaa, 79409af168, 5dbfb64822, 5e7ebf5e69, e73215ca74, a5f123b9ce, ac058e9675, 371b764d1d,
66d7172e09, 99d3a8a749, db5ff372a4, 3fe83f81be, 669e638fd6, f1f999f3b6, 6f3b6fa9ce, 938f945301, e3efb2aad6, 1e678c0d78,
a59c111140, a8b2a31bed, 37402f9ee8, e7b5ecb40f, c817ef04b9, f52b18439c, 1e03c628d5, 71fb39db1f, bcfb3726b0, c6e9e29671,
1bfefcce39, 22488e93e1, 244b89f035, 1f9a241b94, 03641aae42, a2bdd113cc, a92e2f3c7b, 97766b3a57, 9ef4c3bb06, d82f0cd757,
5f529e2af4, beadd9e02b, 72543789cb, 5789439fa9, f549126bcf, 7197548bad, 241fde783c, 2b872cd1f4, a606fb4d1d, 9f9c6be38e,
01ee524049, af9cb65338, 8aa11c580b, ada627f444, a7b6d338c3, 9f00538b97, a085015282, 0b9c220fbb, 0e3d04873d, b7578d939f,
b5c28de03f, e17d25c156, c25dc1b99c, a493a574bd, 4284493dce, 25059de8e1, 1731b05ad0, e80dc663ac, 39988a4c2f, 415bff303a,
a65eb62a54, 03b2982128
```diff
@@ -23,6 +23,6 @@ POSTGRES_USER=postgres
 POSTGRES_PASS=postgrespass
 
 # DEV SETTINGS
-APP_PORT=8000
-API_PORT=8080
+APP_PORT=80
+API_PORT=80
 HTTP_PROTOCOL=https
```
```diff
@@ -1,4 +1,4 @@
-FROM python:3.8-slim
+FROM python:3.9.2-slim
 
 ENV TACTICAL_DIR /opt/tactical
 ENV TACTICAL_GO_DIR /usr/local/rmmgo
@@ -15,7 +15,7 @@ RUN groupadd -g 1000 tactical && \
     useradd -u 1000 -g 1000 tactical
 
 # Copy Go Files
-COPY --from=golang:1.15 /usr/local/go ${TACTICAL_GO_DIR}/go
+COPY --from=golang:1.16 /usr/local/go ${TACTICAL_GO_DIR}/go
 
 # Copy Dev python reqs
 COPY ./requirements.txt /
```
```diff
@@ -3,6 +3,7 @@ version: '3.4'
 services:
   api-dev:
     image: api-dev
+    restart: always
     build:
       context: .
      dockerfile: ./api.dockerfile
@@ -21,6 +22,7 @@ services:
 
   app-dev:
     image: node:12-alpine
+    restart: always
     command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
     working_dir: /workspace/web
     volumes:
```
```diff
@@ -45,7 +45,7 @@ function django_setup {
 echo "setting up django environment"
 
 # configure django settings
-MESH_TOKEN=$(cat ${TACTICAL_DIR}/tmp/mesh_token)
+MESH_TOKEN="$(cat ${TACTICAL_DIR}/tmp/mesh_token)"
 
 DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
 
@@ -100,35 +100,35 @@ MESH_USERNAME = '${MESH_USER}'
 MESH_SITE = 'https://${MESH_HOST}'
 MESH_TOKEN_KEY = '${MESH_TOKEN}'
 REDIS_HOST = '${REDIS_HOST}'
 ADMIN_ENABLED = True
 EOF
 )"
 
 echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
 
 # run migrations and init scripts
-python manage.py migrate --no-input
-python manage.py collectstatic --no-input
-python manage.py initial_db_setup
-python manage.py initial_mesh_setup
-python manage.py load_chocos
-python manage.py load_community_scripts
-python manage.py reload_nats
+"${VIRTUAL_ENV}"/bin/python manage.py migrate --no-input
+"${VIRTUAL_ENV}"/bin/python manage.py collectstatic --no-input
+"${VIRTUAL_ENV}"/bin/python manage.py initial_db_setup
+"${VIRTUAL_ENV}"/bin/python manage.py initial_mesh_setup
+"${VIRTUAL_ENV}"/bin/python manage.py load_chocos
+"${VIRTUAL_ENV}"/bin/python manage.py load_community_scripts
+"${VIRTUAL_ENV}"/bin/python manage.py reload_nats
 
 # create super user
 echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
 
 }
 
 if [ "$1" = 'tactical-init-dev' ]; then
 
 # make directories if they don't exist
-mkdir -p ${TACTICAL_DIR}/tmp
+mkdir -p "${TACTICAL_DIR}/tmp"
 
 test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"
 
 # setup Python virtual env and install dependencies
-test -f ${VIRTUAL_ENV} && python -m venv --copies ${VIRTUAL_ENV}
-pip install --no-cache-dir -r /requirements.txt
+! test -e "${VIRTUAL_ENV}" && python -m venv ${VIRTUAL_ENV}
+"${VIRTUAL_ENV}"/bin/pip install --no-cache-dir -r /requirements.txt
 
 django_setup
 
@@ -150,20 +150,20 @@ EOF
 fi
 
 if [ "$1" = 'tactical-api' ]; then
-cp ${WORKSPACE_DIR}/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
+cp "${WORKSPACE_DIR}"/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
 chmod +x /usr/local/bin/goversioninfo
 
 check_tactical_ready
-python manage.py runserver 0.0.0.0:${API_PORT}
+"${VIRTUAL_ENV}"/bin/python manage.py runserver 0.0.0.0:"${API_PORT}"
 fi
 
 if [ "$1" = 'tactical-celery-dev' ]; then
 check_tactical_ready
-env/bin/celery -A tacticalrmm worker -l debug
+"${VIRTUAL_ENV}"/bin/celery -A tacticalrmm worker -l debug
 fi
 
 if [ "$1" = 'tactical-celerybeat-dev' ]; then
 check_tactical_ready
 test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
-env/bin/celery -A tacticalrmm beat -l debug
+"${VIRTUAL_ENV}"/bin/celery -A tacticalrmm beat -l debug
 fi
```
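The superuser bootstrap above is packed into a single `echo ... | python manage.py shell` pipeline that the entrypoint builds from the `TRMM_USER` and `TRMM_PASS` shell variables. A minimal sketch of the same idempotent logic written out as ordinary Python (reading the values from the environment here is an assumption made for illustration; the entrypoint actually substitutes them in the shell before piping):

```python
# Sketch of the idempotent superuser bootstrap the entrypoint pipes into `manage.py shell`.
# Assumes Django settings are already configured, as they are inside the dev container.
import os

from accounts.models import User

username = os.environ["TRMM_USER"]  # value the entrypoint injects
password = os.environ["TRMM_PASS"]  # value the entrypoint injects

# Only create the account on the first run; later runs are a no-op.
if not User.objects.filter(username=username).exists():
    User.objects.create_superuser(username, "admin@example.com", password)
```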
```diff
@@ -1,44 +1,32 @@
-# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
-amqp==2.6.1
-asgiref==3.3.1
-asyncio-nats-client==0.11.4
-billiard==3.6.3.0
-celery==4.4.6
-certifi==2020.12.5
-cffi==1.14.3
-chardet==3.0.4
-cryptography==3.2.1
-decorator==4.4.2
-Django==3.1.4
-django-cors-headers==3.5.0
-django-rest-knox==4.1.0
-djangorestframework==3.12.2
-future==0.18.2
-idna==2.10
-kombu==4.6.11
-loguru==0.5.3
-msgpack==1.0.0
-packaging==20.4
-psycopg2-binary==2.8.6
-pycparser==2.20
-pycryptodome==3.9.9
-pyotp==2.4.1
-pyparsing==2.4.7
-pytz==2020.4
-qrcode==6.1
-redis==3.5.3
-requests==2.25.0
-six==1.15.0
-sqlparse==0.4.1
-twilio==6.49.0
-urllib3==1.26.2
-validators==0.18.1
-vine==1.3.0
-websockets==8.1
-zipp==3.4.0
+asyncio-nats-client
+celery
+Django
+django-cors-headers
+django-rest-knox
+djangorestframework
+loguru
+msgpack
+psycopg2-binary
+pycparser
+pycryptodome
+pyotp
+pyparsing
+pytz
+qrcode
+redis
+twilio
+packaging
+validators
+websockets
+black
+Werkzeug
+django-extensions
+coverage
+coveralls
+model_bakery
+mkdocs
+mkdocs-material
+pymdown-extensions
+Pygments
+mypy
```
`.github/FUNDING.yml` (vendored, 2 changed lines)

```diff
@@ -3,7 +3,7 @@
 github: wh1te909
 patreon: # Replace with a single Patreon username
 open_collective: # Replace with a single Open Collective username
-ko_fi: # Replace with a single Ko-fi username
+ko_fi: tacticalrmm
 tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
 community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
 liberapay: # Replace with a single Liberapay username
```
`.github/workflows/deploy-docs.yml` (vendored, new file, 22 lines)

```diff
@@ -0,0 +1,22 @@
+name: Deploy Docs
+on:
+  push:
+    branches:
+      - develop
+
+defaults:
+  run:
+    working-directory: docs
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: 3.x
+      - run: pip install --upgrade pip
+      - run: pip install --upgrade setuptools wheel
+      - run: pip install mkdocs mkdocs-material pymdown-extensions
+      - run: mkdocs gh-deploy --force
```
`.gitignore` (vendored, 2 added lines)

```diff
@@ -45,3 +45,5 @@ htmlcov/
 docker-compose.dev.yml
 docs/.vuepress/dist
 nats-rmm.conf
+.mypy_cache
+docs/site/
```
`.vscode/settings.json` (vendored, 7 added lines)

```diff
@@ -3,7 +3,14 @@
   "python.languageServer": "Pylance",
   "python.analysis.extraPaths": [
     "api/tacticalrmm",
+    "api/env",
   ],
+  "python.analysis.diagnosticSeverityOverrides": {
+    "reportUnusedImport": "error",
+    "reportDuplicateImport": "error",
+  },
   "python.analysis.memory.keepLibraryAst": true,
+  "python.linting.mypyEnabled": true,
+  "python.analysis.typeCheckingMode": "basic",
   "python.formatting.provider": "black",
   "editor.formatOnSave": true,
```
`README.md` (100 changed lines)

````diff
@@ -8,13 +8,15 @@
 Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
 It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
 
-# [LIVE DEMO](https://rmm.xlawgaming.com/)
+# [LIVE DEMO](https://rmm.tacticalrmm.io/)
 Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
 
 *Tactical RMM is currently in alpha and subject to breaking changes. Use in production at your own risk.*
 
 ### [Discord Chat](https://discord.gg/upGTkWp)
 
+### [Documentation](https://wh1te909.github.io/tacticalrmm/)
+
 ## Features
 
 - Teamviewer-like remote desktop control
@@ -33,98 +35,6 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
 
 - Windows 7, 8.1, 10, Server 2008R2, 2012R2, 2016, 2019
 
-## Installation
+## Installation / Backup / Restore / Usage
-
-### Requirements
-- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04 / Debian 10)
-- A domain you own with at least 3 subdomains
-- Google Authenticator app (2 factor is NOT optional)
-
-### Docker
-Refer to the [docker setup](docker/readme.md)
-
-
-### Installation example (Ubuntu server 20.04 LTS)
-
-Fresh VPS with latest updates\
-login as root and create a user and add to sudoers group (we will be creating a user called tactical)
-```
-apt update && apt -y upgrade
-adduser tactical
-usermod -a -G sudo tactical
-```
-
-switch to the tactical user and setup the firewall
-```
-su - tactical
-sudo ufw default deny incoming
-sudo ufw default allow outgoing
-sudo ufw allow ssh
-sudo ufw allow http
-sudo ufw allow https
-sudo ufw allow proto tcp from any to any port 4222
-sudo ufw enable && sudo ufw reload
-```
-
-Our domain for this example is tacticalrmm.com
-
-In the DNS manager of wherever our domain is hosted, we will create three A records, all pointing to the public IP address of our VPS
-
-Create A record ```api.tacticalrmm.com``` for the django rest backend\
-Create A record ```rmm.tacticalrmm.com``` for the vue frontend\
-Create A record ```mesh.tacticalrmm.com``` for meshcentral
-
-Download the install script and run it
-
-```
-wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/install.sh
-chmod +x install.sh
-./install.sh
-```
-
-Links will be provided at the end of the install script.\
-Download the executable from the first link, then open ```rmm.tacticalrmm.com``` and login.\
-Upload the executable when prompted during the initial setup page.
-
-
-### Install an agent
-From the app's dashboard, choose Agents > Install Agent to generate an installer.
-
-## Updating
-Download and run [update.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh)
-```
-wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh
-chmod +x update.sh
-./update.sh
-```
-
-## Backup
-Download [backup.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh)
-```
-wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh
-```
-Change the postgres username and password at the top of the file (you can find them in `/rmm/api/tacticalrmm/tacticalrmm/local_settings.py` under the DATABASES section)
-
-Run it
-```
-chmod +x backup.sh
-./backup.sh
-```
-
-## Restore
-Change your 3 A records to point to new server's public IP
-
-Create same linux user account as old server and add to sudoers group and setup firewall (see install instructions above)
-
-Copy backup file to new server
-
-Download the restore script, and edit the postgres username/password at the top of the file. Same instructions as above in the backup steps.
-```
-wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/restore.sh
-```
-
-Run the restore script, passing it the backup tar file as the first argument
-```
-chmod +x restore.sh
-./restore.sh rmm-backup-xxxxxxx.tar
-```
+
+### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)
````
```diff
@@ -1,5 +1,4 @@
 from django.contrib import admin
-
 from rest_framework.authtoken.admin import TokenAdmin
 
 from .models import User
@@ -1,6 +1,5 @@
-from django.utils import timezone as djangotime
-
 from django.core.management.base import BaseCommand
+from django.utils import timezone as djangotime
 from knox.models import AuthToken
 
 
```
```diff
@@ -1,11 +1,13 @@
-import pyotp
 import subprocess
 
+import pyotp
 from django.core.management.base import BaseCommand
 
 from accounts.models import User
 
 
 class Command(BaseCommand):
-    help = "Generates barcode for Google Authenticator and creates totp for user"
+    help = "Generates barcode for Authenticator and creates totp for user"
 
     def add_arguments(self, parser):
         parser.add_argument("code", type=str)
@@ -24,12 +26,10 @@ class Command(BaseCommand):
         url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain)
         subprocess.run(f'qr "{url}"', shell=True)
         self.stdout.write(
-            self.style.SUCCESS(
-                "Scan the barcode above with your google authenticator app"
-            )
+            self.style.SUCCESS("Scan the barcode above with your authenticator app")
         )
         self.stdout.write(
             self.style.SUCCESS(
-                f"If that doesn't work you may manually enter the key: {code}"
+                f"If that doesn't work you may manually enter the setup key: {code}"
             )
         )
```
`api/tacticalrmm/accounts/management/commands/reset_2fa.py` (new file, 57 lines)

```diff
@@ -0,0 +1,57 @@
+import os
+import subprocess
+
+import pyotp
+from django.core.management.base import BaseCommand
+
+from accounts.models import User
+
+
+class Command(BaseCommand):
+    help = "Reset 2fa"
+
+    def add_arguments(self, parser):
+        parser.add_argument("username", type=str)
+
+    def handle(self, *args, **kwargs):
+        username = kwargs["username"]
+        try:
+            user = User.objects.get(username=username)
+        except User.DoesNotExist:
+            self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
+            return
+
+        domain = "Tactical RMM"
+        nginx = "/etc/nginx/sites-available/frontend.conf"
+        found = None
+        if os.path.exists(nginx):
+            try:
+                with open(nginx, "r") as f:
+                    for line in f:
+                        if "server_name" in line:
+                            found = line
+                            break
+
+                if found:
+                    rep = found.replace("server_name", "").replace(";", "")
+                    domain = "".join(rep.split())
+            except:
+                pass
+
+        code = pyotp.random_base32()
+        user.totp_key = code
+        user.save(update_fields=["totp_key"])
+
+        url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain)
+        subprocess.run(f'qr "{url}"', shell=True)
+        self.stdout.write(
+            self.style.WARNING("Scan the barcode above with your authenticator app")
+        )
+        self.stdout.write(
+            self.style.WARNING(
+                f"If that doesn't work you may manually enter the setup key: {code}"
+            )
+        )
+        self.stdout.write(
+            self.style.SUCCESS(f"2fa was successfully reset for user {username}")
+        )
```
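The new `reset_2fa` command takes a username, writes a fresh `totp_key` to that user, and prints a QR code plus a fallback setup key. A hedged usage sketch, invoking it programmatically (equivalent to `python manage.py reset_2fa <username>` inside the API environment; the username is an example):

```python
# Usage sketch for the reset_2fa management command added above.
from django.core.management import call_command

call_command("reset_2fa", "tactical")  # "tactical" is an example username
```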
```diff
@@ -0,0 +1,22 @@
+from django.core.management.base import BaseCommand
+from accounts.models import User
+
+
+class Command(BaseCommand):
+    help = "Reset password for user"
+
+    def add_arguments(self, parser):
+        parser.add_argument("username", type=str)
+
+    def handle(self, *args, **kwargs):
+        username = kwargs["username"]
+        try:
+            user = User.objects.get(username=username)
+        except User.DoesNotExist:
+            self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
+            return
+
+        passwd = input("Enter new password: ")
+        user.set_password(passwd)
+        user.save()
+        self.stdout.write(self.style.SUCCESS(f"Password for {username} was reset!"))
```
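This password-reset command prompts for the new password on stdin. The module's file name is not shown in this capture, so the command name in the sketch below is a placeholder:

```python
# Usage sketch; "reset_password" is a placeholder for whatever the file is actually named.
from django.core.management import call_command

call_command("reset_password", "tactical")  # prompts "Enter new password: " on stdin
```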
```diff
@@ -2,8 +2,8 @@
 
 import django.contrib.auth.models
 import django.contrib.auth.validators
-from django.db import migrations, models
 import django.utils.timezone
+from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
@@ -1,7 +1,7 @@
 # Generated by Django 3.1.2 on 2020-11-10 20:24
 
-from django.db import migrations, models
 import django.db.models.deletion
+from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.7 on 2021-02-28 06:38
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('accounts', '0011_user_default_agent_tbl_tab'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='user',
+            name='agents_per_page',
+            field=models.PositiveIntegerField(default=50),
+        ),
+    ]
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.7 on 2021-03-09 02:33
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('accounts', '0012_user_agents_per_page'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='user',
+            name='client_tree_sort',
+            field=models.CharField(choices=[('alphafail', 'Move failing clients to the top'), ('alpha', 'Sort alphabetically')], default='alphafail', max_length=50),
+        ),
+    ]
```
```diff
@@ -1,5 +1,5 @@
-from django.db import models
 from django.contrib.auth.models import AbstractUser
+from django.db import models
 
 from logs.models import BaseAuditModel
 
@@ -15,6 +15,11 @@ AGENT_TBL_TAB_CHOICES = [
     ("mixed", "Mixed"),
 ]
 
+CLIENT_TREE_SORT_CHOICES = [
+    ("alphafail", "Move failing clients to the top"),
+    ("alpha", "Sort alphabetically"),
+]
+
 
 class User(AbstractUser, BaseAuditModel):
     is_active = models.BooleanField(default=True)
@@ -27,6 +32,10 @@ class User(AbstractUser, BaseAuditModel):
     default_agent_tbl_tab = models.CharField(
         max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server"
     )
+    agents_per_page = models.PositiveIntegerField(default=50)  # not currently used
+    client_tree_sort = models.CharField(
+        max_length=50, choices=CLIENT_TREE_SORT_CHOICES, default="alphafail"
+    )
 
     agent = models.OneToOneField(
         "agents.Agent",
```
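The two new `User` fields store per-user UI preferences on the account itself. A minimal sketch of setting them from a `manage.py shell` session (field names and choices come from the diff above; the username and page size are example values):

```python
# Sketch: updating the new per-user UI preference fields added above.
from accounts.models import User

user = User.objects.get(username="tactical")   # example username
user.client_tree_sort = "alpha"                # one of CLIENT_TREE_SORT_CHOICES
user.agents_per_page = 100                     # noted in the diff as not currently used
user.save(update_fields=["client_tree_sort", "agents_per_page"])
```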
```diff
@@ -1,13 +1,21 @@
 import pyotp
-
-from rest_framework.serializers import (
-    ModelSerializer,
-    SerializerMethodField,
-)
+from rest_framework.serializers import ModelSerializer, SerializerMethodField
 
 from .models import User
 
 
+class UserUISerializer(ModelSerializer):
+    class Meta:
+        model = User
+        fields = [
+            "dark_mode",
+            "show_community_scripts",
+            "agent_dblclick_action",
+            "default_agent_tbl_tab",
+            "client_tree_sort",
+        ]
+
+
 class UserSerializer(ModelSerializer):
     class Meta:
         model = User
```
```diff
@@ -1,8 +1,9 @@
 from unittest.mock import patch
 
 from django.test import override_settings
 
-from tacticalrmm.test import TacticalTestCase
 from accounts.models import User
+from tacticalrmm.test import TacticalTestCase
 
 
 class TestAccounts(TacticalTestCase):
@@ -270,23 +271,14 @@ class TestUserAction(TacticalTestCase):
 
     def test_user_ui(self):
         url = "/accounts/users/ui/"
-        data = {"dark_mode": False}
-        r = self.client.patch(url, data, format="json")
-        self.assertEqual(r.status_code, 200)
-
-        data = {"show_community_scripts": True}
-        r = self.client.patch(url, data, format="json")
-        self.assertEqual(r.status_code, 200)
-
-        data = {"agent_dblclick_action": "editagent"}
-        r = self.client.patch(url, data, format="json")
-        self.assertEqual(r.status_code, 200)
-
-        data = {"agent_dblclick_action": "remotebg"}
-        r = self.client.patch(url, data, format="json")
-        self.assertEqual(r.status_code, 200)
-
-        data = {"agent_dblclick_action": "takecontrol"}
+        data = {
+            "dark_mode": True,
+            "show_community_scripts": True,
+            "agent_dblclick_action": "editagent",
+            "default_agent_tbl_tab": "mixed",
+            "client_tree_sort": "alpha",
+        }
         r = self.client.patch(url, data, format="json")
         self.assertEqual(r.status_code, 200)
 
@@ -1,4 +1,5 @@
 from django.urls import path
+
 from . import views
 
 urlpatterns = [
```
```diff
@@ -1,23 +1,28 @@
 import pyotp
 
-from django.contrib.auth import login
 from django.conf import settings
-from django.shortcuts import get_object_or_404
+from django.contrib.auth import login
 from django.db import IntegrityError
 
-from rest_framework.views import APIView
-from rest_framework.authtoken.serializers import AuthTokenSerializer
+from django.shortcuts import get_object_or_404
 from knox.views import LoginView as KnoxLoginView
-from rest_framework import status
+from rest_framework.authtoken.serializers import AuthTokenSerializer
 from rest_framework.permissions import AllowAny
 from rest_framework.response import Response
+from rest_framework import status
+from rest_framework.views import APIView
 
-from .models import User
 from agents.models import Agent
 from logs.models import AuditLog
 from tacticalrmm.utils import notify_error
 
-from .serializers import UserSerializer, TOTPSetupSerializer
+from .models import User
+from .serializers import TOTPSetupSerializer, UserSerializer, UserUISerializer
+
+
+def _is_root_user(request, user) -> bool:
+    return (
+        hasattr(settings, "ROOT_USER")
+        and request.user != user
+        and user.username == settings.ROOT_USER
+    )
 
 
 class CheckCreds(KnoxLoginView):
@@ -81,7 +86,7 @@
     def post(self, request):
         # add new user
         try:
-            user = User.objects.create_user(
+            user = User.objects.create_user(  # type: ignore
                 request.data["username"],
                 request.data["email"],
                 request.data["password"],
@@ -108,11 +113,7 @@
     def put(self, request, pk):
         user = get_object_or_404(User, pk=pk)
 
-        if (
-            hasattr(settings, "ROOT_USER")
-            and request.user != user
-            and user.username == settings.ROOT_USER
-        ):
+        if _is_root_user(request, user):
             return notify_error("The root user cannot be modified from the UI")
 
         serializer = UserSerializer(instance=user, data=request.data, partial=True)
@@ -123,11 +124,7 @@
 
     def delete(self, request, pk):
         user = get_object_or_404(User, pk=pk)
-        if (
-            hasattr(settings, "ROOT_USER")
-            and request.user != user
-            and user.username == settings.ROOT_USER
-        ):
+        if _is_root_user(request, user):
             return notify_error("The root user cannot be deleted from the UI")
 
         user.delete()
@@ -140,11 +137,7 @@
     # reset password
     def post(self, request):
         user = get_object_or_404(User, pk=request.data["id"])
-        if (
-            hasattr(settings, "ROOT_USER")
-            and request.user != user
-            and user.username == settings.ROOT_USER
-        ):
+        if _is_root_user(request, user):
             return notify_error("The root user cannot be modified from the UI")
 
         user.set_password(request.data["password"])
@@ -155,11 +148,7 @@
     # reset two factor token
     def put(self, request):
         user = get_object_or_404(User, pk=request.data["id"])
-        if (
-            hasattr(settings, "ROOT_USER")
-            and request.user != user
-            and user.username == settings.ROOT_USER
-        ):
+        if _is_root_user(request, user):
             return notify_error("The root user cannot be modified from the UI")
 
         user.totp_key = ""
@@ -187,19 +176,9 @@
 
 class UserUI(APIView):
     def patch(self, request):
-        user = request.user
-
-        if "dark_mode" in request.data.keys():
-            user.dark_mode = request.data["dark_mode"]
-            user.save(update_fields=["dark_mode"])
-
-        if "show_community_scripts" in request.data.keys():
-            user.show_community_scripts = request.data["show_community_scripts"]
-            user.save(update_fields=["show_community_scripts"])
-
-        if "userui" in request.data.keys():
-            user.agent_dblclick_action = request.data["agent_dblclick_action"]
-            user.default_agent_tbl_tab = request.data["default_agent_tbl_tab"]
-            user.save(update_fields=["agent_dblclick_action", "default_agent_tbl_tab"])
-
+        serializer = UserUISerializer(
+            instance=request.user, data=request.data, partial=True
+        )
+        serializer.is_valid(raise_exception=True)
+        serializer.save()
         return Response("ok")
```
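The rewritten `UserUI.patch` hands the whole payload to `UserUISerializer`, so any subset of the five UI fields can be updated in a single PATCH. A hedged sketch, exercising it the same way the updated test does (the URL and fields come from the diffs above; the `authenticate()` helper is assumed to be provided by `TacticalTestCase`):

```python
# Sketch: PATCHing the UserUI endpoint with a partial payload, test-style.
from tacticalrmm.test import TacticalTestCase


class UserUIPatchExample(TacticalTestCase):
    def test_partial_ui_update(self):
        self.authenticate()  # assumption: auth helper on the test base class
        payload = {"dark_mode": True, "client_tree_sort": "alpha"}  # any subset of UI fields
        r = self.client.patch("/accounts/users/ui/", payload, format="json")
        self.assertEqual(r.status_code, 200)
```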
```diff
@@ -1,8 +1,7 @@
 from django.contrib import admin
 
-from .models import Agent, AgentOutage, RecoveryAction, Note
+from .models import Agent, Note, RecoveryAction
 
 admin.site.register(Agent)
-admin.site.register(AgentOutage)
 admin.site.register(RecoveryAction)
 admin.site.register(Note)
```
```diff
@@ -1,14 +1,12 @@
+import json
+import os
 import random
 import string
-import os
-import json
 
-from model_bakery.recipe import Recipe, seq
 from itertools import cycle
-from django.utils import timezone as djangotime
-from django.conf import settings
-
-from .models import Agent
+from django.conf import settings
+from django.utils import timezone as djangotime
+from model_bakery.recipe import Recipe, foreign_key, seq
 
 
 def generate_agent_id(hostname):
@@ -16,6 +14,9 @@ def generate_agent_id(hostname):
     return f"{rand}-{hostname}"
 
 
+site = Recipe("clients.Site")
+
+
 def get_wmi_data():
     with open(
         os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json")
@@ -24,12 +25,12 @@ def get_wmi_data():
 
 
 agent = Recipe(
-    Agent,
+    "agents.Agent",
+    site=foreign_key(site),
     hostname="DESKTOP-TEST123",
     version="1.3.0",
     monitoring_type=cycle(["workstation", "server"]),
     salt_id=generate_agent_id("DESKTOP-TEST123"),
-    agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",
+    agent_id=seq("asdkj3h4234-1234hg3h4g34-234jjh34|DESKTOP-TEST123"),
 )
 
 server_agent = agent.extend(
@@ -42,8 +43,12 @@ workstation_agent = agent.extend(
 
 online_agent = agent.extend(last_seen=djangotime.now())
 
+offline_agent = agent.extend(
+    last_seen=djangotime.now() - djangotime.timedelta(minutes=7)
+)
+
 overdue_agent = agent.extend(
-    last_seen=djangotime.now() - djangotime.timedelta(minutes=6)
+    last_seen=djangotime.now() - djangotime.timedelta(minutes=35)
 )
 
 agent_with_services = agent.extend(
```
```diff
@@ -0,0 +1,93 @@
+from django.core.management.base import BaseCommand
+
+from agents.models import Agent
+from clients.models import Client, Site
+
+
+class Command(BaseCommand):
+    help = "Bulk update agent offline/overdue time"
+
+    def add_arguments(self, parser):
+        parser.add_argument("time", type=int, help="Time in minutes")
+        parser.add_argument(
+            "--client",
+            type=str,
+            help="Client Name",
+        )
+        parser.add_argument(
+            "--site",
+            type=str,
+            help="Site Name",
+        )
+        parser.add_argument(
+            "--offline",
+            action="store_true",
+            help="Offline",
+        )
+        parser.add_argument(
+            "--overdue",
+            action="store_true",
+            help="Overdue",
+        )
+        parser.add_argument(
+            "--all",
+            action="store_true",
+            help="All agents",
+        )
+
+    def handle(self, *args, **kwargs):
+        time = kwargs["time"]
+        client_name = kwargs["client"]
+        site_name = kwargs["site"]
+        all_agents = kwargs["all"]
+        offline = kwargs["offline"]
+        overdue = kwargs["overdue"]
+        agents = None
+
+        if offline and time < 2:
+            self.stdout.write(self.style.ERROR("Minimum offline time is 2 minutes"))
+            return
+
+        if overdue and time < 3:
+            self.stdout.write(self.style.ERROR("Minimum overdue time is 3 minutes"))
+            return
+
+        if client_name:
+            try:
+                client = Client.objects.get(name=client_name)
+            except Client.DoesNotExist:
+                self.stdout.write(
+                    self.style.ERROR(f"Client {client_name} doesn't exist")
+                )
+                return
+
+            agents = Agent.objects.filter(site__client=client)
+
+        elif site_name:
+            try:
+                site = Site.objects.get(name=site_name)
+            except Site.DoesNotExist:
+                self.stdout.write(self.style.ERROR(f"Site {site_name} doesn't exist"))
+                return
+
+            agents = Agent.objects.filter(site=site)
+
+        elif all_agents:
+            agents = Agent.objects.all()
+
+        if agents:
+            if offline:
+                agents.update(offline_time=time)
+                self.stdout.write(
+                    self.style.SUCCESS(
+                        f"Changed offline time on {len(agents)} agents to {time} minutes"
+                    )
+                )
+
+            if overdue:
+                agents.update(overdue_time=time)
+                self.stdout.write(
+                    self.style.SUCCESS(
+                        f"Changed overdue time on {len(agents)} agents to {time} minutes"
+                    )
+                )
```
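This command scopes the update to a client, a site, or all agents, and enforces minimum values of 2 minutes (offline) and 3 minutes (overdue). The module's file name is not shown in this capture, so the command name below is a placeholder:

```python
# Usage sketch; "bulk_change_time" is a placeholder for the real command/module name.
from django.core.management import call_command

# Set overdue_time to 30 minutes for every agent under one client (names are examples).
call_command("bulk_change_time", 30, client="Example Client", overdue=True)

# Set offline_time to 5 minutes for all agents.
call_command("bulk_change_time", 5, all=True, offline=True)
```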
```diff
@@ -0,0 +1,18 @@
+from django.conf import settings
+from django.core.management.base import BaseCommand
+
+from agents.models import Agent
+
+
+class Command(BaseCommand):
+    help = "Shows online agents that are not on the latest version"
+
+    def handle(self, *args, **kwargs):
+        q = Agent.objects.exclude(version=settings.LATEST_AGENT_VER).only(
+            "pk", "version", "last_seen", "overdue_time", "offline_time"
+        )
+        agents = [i for i in q if i.status == "online"]
+        for agent in agents:
+            self.stdout.write(
+                self.style.SUCCESS(f"{agent.hostname} - v{agent.version}")
+            )
```
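Note that `.only()` loads just the columns `Agent.status` needs (it is a Python property computed from `last_seen`, `offline_time`, and `overdue_time` in the models.py diff below), so the online filter has to run in Python rather than SQL. The file name is not shown in this capture, so the command name in the sketch is a placeholder:

```python
# Usage sketch; "show_outdated_agents" is a placeholder command name.
from django.core.management import call_command

call_command("show_outdated_agents")  # prints "<hostname> - v<version>" for each online, outdated agent
```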
```diff
@@ -1,8 +1,8 @@
 # Generated by Django 3.0.6 on 2020-05-31 01:23
 
 import django.contrib.postgres.fields.jsonb
-from django.db import migrations, models
 import django.db.models.deletion
+from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
@@ -1,7 +1,7 @@
 # Generated by Django 3.0.7 on 2020-06-09 16:07
 
-from django.db import migrations, models
 import django.db.models.deletion
+from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
@@ -1,7 +1,7 @@
 # Generated by Django 3.0.8 on 2020-08-09 05:31
 
-from django.db import migrations, models
 import django.db.models.deletion
+from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
@@ -1,8 +1,8 @@
 # Generated by Django 3.1.1 on 2020-09-22 20:57
 
+import django.db.models.deletion
 from django.conf import settings
 from django.db import migrations, models
-import django.db.models.deletion
 
 
 class Migration(migrations.Migration):
@@ -1,7 +1,7 @@
 # Generated by Django 3.1.2 on 2020-11-01 22:53
 
-from django.db import migrations, models
 import django.db.models.deletion
+from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.4 on 2021-01-29 21:11
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('agents', '0026_auto_20201125_2334'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='agent',
+            name='overdue_dashboard_alert',
+            field=models.BooleanField(default=False),
+        ),
+    ]
```
`api/tacticalrmm/agents/migrations/0028_auto_20210206_1534.py` (new file, 23 lines)

```diff
@@ -0,0 +1,23 @@
+# Generated by Django 3.1.4 on 2021-02-06 15:34
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('agents', '0027_agent_overdue_dashboard_alert'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='agentoutage',
+            name='outage_email_sent_time',
+            field=models.DateTimeField(blank=True, null=True),
+        ),
+        migrations.AddField(
+            model_name='agentoutage',
+            name='outage_sms_sent_time',
+            field=models.DateTimeField(blank=True, null=True),
+        ),
+    ]
```

`api/tacticalrmm/agents/migrations/0029_delete_agentoutage.py` (new file, 16 lines)

```diff
@@ -0,0 +1,16 @@
+# Generated by Django 3.1.4 on 2021-02-10 21:56
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('agents', '0028_auto_20210206_1534'),
+    ]
+
+    operations = [
+        migrations.DeleteModel(
+            name='AgentOutage',
+        ),
+    ]
```

`api/tacticalrmm/agents/migrations/0030_agent_offline_time.py` (new file, 18 lines)

```diff
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.6 on 2021-02-16 08:50
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('agents', '0029_delete_agentoutage'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='agent',
+            name='offline_time',
+            field=models.PositiveIntegerField(default=4),
+        ),
+    ]
```

```diff
@@ -0,0 +1,20 @@
+# Generated by Django 3.1.7 on 2021-03-04 03:57
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('alerts', '0006_auto_20210217_1736'),
+        ('agents', '0030_agent_offline_time'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='agent',
+            name='alert_template',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='agents', to='alerts.alerttemplate'),
+        ),
+    ]
```
```diff
@@ -1,25 +1,26 @@
-import time
 import asyncio
 import base64
-from Crypto.Cipher import AES
-from Crypto.Random import get_random_bytes
-from Crypto.Hash import SHA3_384
-from Crypto.Util.Padding import pad
-import validators
-import msgpack
 import re
+import time
 from collections import Counter
-from typing import List
-from loguru import logger
-from packaging import version as pyver
 from distutils.version import LooseVersion
+from typing import Any, Union
 
+import msgpack
+import validators
+from Crypto.Cipher import AES
+from Crypto.Hash import SHA3_384
+from Crypto.Random import get_random_bytes
+from Crypto.Util.Padding import pad
+from django.conf import settings
+from django.db import models
+from django.utils import timezone as djangotime
+from loguru import logger
 from nats.aio.client import Client as NATS
 from nats.aio.errors import ErrTimeout
+from packaging import version as pyver
 
-from django.db import models
-from django.conf import settings
-from django.utils import timezone as djangotime
-
-from core.models import CoreSettings, TZ_CHOICES
+from core.models import TZ_CHOICES, CoreSettings
 from logs.models import BaseAuditModel
 
 logger.configure(**settings.LOG_CONFIG)
@@ -50,6 +51,8 @@ class Agent(BaseAuditModel):
     mesh_node_id = models.CharField(null=True, blank=True, max_length=255)
     overdue_email_alert = models.BooleanField(default=False)
     overdue_text_alert = models.BooleanField(default=False)
+    overdue_dashboard_alert = models.BooleanField(default=False)
+    offline_time = models.PositiveIntegerField(default=4)
     overdue_time = models.PositiveIntegerField(default=30)
     check_interval = models.PositiveIntegerField(default=120)
     needs_reboot = models.BooleanField(default=False)
@@ -60,6 +63,13 @@ class Agent(BaseAuditModel):
         max_length=255, choices=TZ_CHOICES, null=True, blank=True
     )
     maintenance_mode = models.BooleanField(default=False)
+    alert_template = models.ForeignKey(
+        "alerts.AlertTemplate",
+        related_name="agents",
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+    )
     site = models.ForeignKey(
         "clients.Site",
         related_name="agents",
@@ -75,6 +85,24 @@ class Agent(BaseAuditModel):
         on_delete=models.SET_NULL,
     )
 
+    def save(self, *args, **kwargs):
+
+        # get old agent if exists
+        old_agent = type(self).objects.get(pk=self.pk) if self.pk else None
+        super(BaseAuditModel, self).save(*args, **kwargs)
+
+        # check if new agent has been created
+        # or check if policy have changed on agent
+        # or if site has changed on agent and if so generate-policies
+        if (
+            not old_agent
+            or old_agent
+            and old_agent.policy != self.policy
+            or old_agent.site != self.site
+        ):
+            self.generate_checks_from_policies()
+            self.generate_tasks_from_policies()
+
     def __str__(self):
         return self.hostname
 
@@ -82,14 +110,6 @@ class Agent(BaseAuditModel):
     def client(self):
         return self.site.client
 
-    @property
-    def has_nats(self):
-        return pyver.parse(self.version) >= pyver.parse("1.1.0")
-
-    @property
-    def has_gotasks(self):
-        return pyver.parse(self.version) >= pyver.parse("1.1.1")
-
     @property
     def timezone(self):
         # return the default timezone unless the timezone is explicity set per agent
@@ -127,7 +147,7 @@ class Agent(BaseAuditModel):
 
     @property
     def status(self):
-        offline = djangotime.now() - djangotime.timedelta(minutes=6)
+        offline = djangotime.now() - djangotime.timedelta(minutes=self.offline_time)
         overdue = djangotime.now() - djangotime.timedelta(minutes=self.overdue_time)
 
         if self.last_seen is not None:
@@ -142,14 +162,14 @@ class Agent(BaseAuditModel):
 
     @property
     def has_patches_pending(self):
-        return self.winupdates.filter(action="approve").filter(installed=False).exists()
+        return self.winupdates.filter(action="approve").filter(installed=False).exists()  # type: ignore
 
     @property
     def checks(self):
         total, passing, failing = 0, 0, 0
 
-        if self.agentchecks.exists():
-            for i in self.agentchecks.all():
+        if self.agentchecks.exists():  # type: ignore
+            for i in self.agentchecks.all():  # type: ignore
                 total += 1
                 if i.status == "passing":
                     passing += 1
@@ -219,6 +239,7 @@ class Agent(BaseAuditModel):
             pass
 
         try:
             comp_sys_prod = self.wmi_detail["comp_sys_prod"][0]
             return [x["Version"] for x in comp_sys_prod if "Version" in x][0]
         except:
             pass
@@ -248,33 +269,104 @@ class Agent(BaseAuditModel):
         except:
             return ["unknown disk"]
 
+    def check_run_interval(self) -> int:
+        interval = self.check_interval
+        # determine if any agent checks have a custom interval and set the lowest interval
+        for check in self.agentchecks.filter(overriden_by_policy=False):  # type: ignore
+            if check.run_interval and check.run_interval < interval:
+
+                # don't allow check runs less than 15s
+                if check.run_interval < 15:
+                    interval = 15
+                else:
+                    interval = check.run_interval
+
+        return interval
+
+    def run_script(
+        self,
+        scriptpk: int,
+        args: list[str] = [],
+        timeout: int = 120,
+        full: bool = False,
+        wait: bool = False,
+        run_on_any: bool = False,
+    ) -> Any:
+
+        from scripts.models import Script
+
+        script = Script.objects.get(pk=scriptpk)
+        data = {
+            "func": "runscriptfull" if full else "runscript",
+            "timeout": timeout,
+            "script_args": args,
+            "payload": {
+                "code": script.code,
+                "shell": script.shell,
+            },
+        }
+
+        running_agent = self
+        if run_on_any:
+            nats_ping = {"func": "ping"}
+
+            # try on self first
+            r = asyncio.run(self.nats_cmd(nats_ping, timeout=1))
+
+            if r == "pong":
+                running_agent = self
+            else:
+                online = [
+                    agent
+                    for agent in Agent.objects.only(
+                        "pk", "last_seen", "overdue_time", "offline_time"
+                    )
+                    if agent.status == "online"
+                ]
+
+                for agent in online:
+                    r = asyncio.run(agent.nats_cmd(nats_ping, timeout=1))
+                    if r == "pong":
+                        running_agent = agent
+                        break
+
+                if running_agent.pk == self.pk:
+                    return "Unable to find an online agent"
+
+        if wait:
+            return asyncio.run(running_agent.nats_cmd(data, timeout=timeout, wait=True))
+        else:
+            asyncio.run(running_agent.nats_cmd(data, wait=False))
+
+        return "ok"
+
```
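The new `run_script` method packs the script body and arguments into a NATS payload and, when `run_on_any=True`, pings other online agents and hands the job to the first one that answers if this agent is unreachable. A hedged usage sketch (primary-key values are examples; this assumes Django is configured and the NATS server is reachable):

```python
# Usage sketch for Agent.run_script as added above; pk values are examples.
from agents.models import Agent

agent = Agent.objects.get(pk=1)

# Fire-and-forget on this agent; returns "ok" immediately.
agent.run_script(scriptpk=5, args=["-Verbose"], timeout=60)

# Wait for the output, and let any online agent run it if this one does not answer a ping.
output = agent.run_script(scriptpk=5, full=True, wait=True, run_on_any=True)
print(output)
```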
# auto approves updates
|
||||
def approve_updates(self):
|
||||
patch_policy = self.get_patch_policy()
|
||||
|
||||
updates = list()
|
||||
if patch_policy.critical == "approve":
|
||||
updates += self.winupdates.filter(
|
||||
updates += self.winupdates.filter( # type: ignore
|
||||
severity="Critical", installed=False
|
||||
).exclude(action="approve")
|
||||
|
||||
if patch_policy.important == "approve":
|
||||
updates += self.winupdates.filter(
|
||||
updates += self.winupdates.filter( # type: ignore
|
||||
severity="Important", installed=False
|
||||
).exclude(action="approve")
|
||||
|
||||
if patch_policy.moderate == "approve":
|
||||
updates += self.winupdates.filter(
|
||||
updates += self.winupdates.filter( # type: ignore
|
||||
severity="Moderate", installed=False
|
||||
).exclude(action="approve")
|
||||
|
||||
if patch_policy.low == "approve":
|
||||
updates += self.winupdates.filter(severity="Low", installed=False).exclude(
|
||||
updates += self.winupdates.filter(severity="Low", installed=False).exclude( # type: ignore
|
||||
action="approve"
|
||||
)
|
||||
|
||||
if patch_policy.other == "approve":
|
||||
updates += self.winupdates.filter(severity="", installed=False).exclude(
|
||||
updates += self.winupdates.filter(severity="", installed=False).exclude( # type: ignore
|
||||
action="approve"
|
||||
)
|
||||
|
||||
@@ -289,7 +381,7 @@ class Agent(BaseAuditModel):
|
||||
site = self.site
|
||||
core_settings = CoreSettings.objects.first()
|
||||
patch_policy = None
|
||||
agent_policy = self.winupdatepolicy.get()
|
||||
agent_policy = self.winupdatepolicy.get() # type: ignore
|
||||
|
||||
if self.monitoring_type == "server":
|
||||
# check agent policy first which should override client or site policy
|
||||
@@ -374,18 +466,132 @@ class Agent(BaseAuditModel):
|
||||
|
||||
return patch_policy
|
||||
|
||||
def get_approved_update_guids(self) -> List[str]:
|
||||
def get_approved_update_guids(self) -> list[str]:
|
||||
return list(
|
||||
self.winupdates.filter(action="approve", installed=False).values_list(
|
||||
self.winupdates.filter(action="approve", installed=False).values_list( # type: ignore
|
||||
"guid", flat=True
|
||||
)
|
||||
)
|
||||
|
||||
# sets alert template assigned in the following order: policy, site, client, global
|
||||
# sets None if nothing is found
|
||||
def set_alert_template(self):
|
||||
|
||||
site = self.site
|
||||
client = self.client
|
||||
core = CoreSettings.objects.first()
|
||||
|
||||
templates = list()
|
||||
# check if alert template is on a policy assigned to agent
|
||||
if (
|
||||
self.policy
|
||||
and self.policy.alert_template
|
||||
and self.policy.alert_template.is_active
|
||||
):
        templates.append(self.policy.alert_template)

        # check if policy with alert template is assigned to the site
        if (
            self.monitoring_type == "server"
            and site.server_policy
            and site.server_policy.alert_template
            and site.server_policy.alert_template.is_active
        ):
            templates.append(site.server_policy.alert_template)
        if (
            self.monitoring_type == "workstation"
            and site.workstation_policy
            and site.workstation_policy.alert_template
            and site.workstation_policy.alert_template.is_active
        ):
            templates.append(site.workstation_policy.alert_template)

        # check if alert template is assigned to site
        if site.alert_template and site.alert_template.is_active:
            templates.append(site.alert_template)

        # check if policy with alert template is assigned to the client
        if (
            self.monitoring_type == "server"
            and client.server_policy
            and client.server_policy.alert_template
            and client.server_policy.alert_template.is_active
        ):
            templates.append(client.server_policy.alert_template)
        if (
            self.monitoring_type == "workstation"
            and client.workstation_policy
            and client.workstation_policy.alert_template
            and client.workstation_policy.alert_template.is_active
        ):
            templates.append(client.workstation_policy.alert_template)

        # check if alert template is on client and return
        if client.alert_template and client.alert_template.is_active:
            templates.append(client.alert_template)

        # check if alert template is applied globally and return
        if core.alert_template and core.alert_template.is_active:
            templates.append(core.alert_template)

        # check if a policy with an alert template is applied globally (core settings)
        if (
            self.monitoring_type == "server"
            and core.server_policy
            and core.server_policy.alert_template
            and core.server_policy.alert_template.is_active
        ):
            templates.append(core.server_policy.alert_template)
        if (
            self.monitoring_type == "workstation"
            and core.workstation_policy
            and core.workstation_policy.alert_template
            and core.workstation_policy.alert_template.is_active
        ):
            templates.append(core.workstation_policy.alert_template)

        # go through the templates and return the first one that isn't excluded
        for template in templates:
            # check if client, site, or agent has been excluded from template
            if (
                client.pk
                in template.excluded_clients.all().values_list("pk", flat=True)
                or site.pk in template.excluded_sites.all().values_list("pk", flat=True)
                or self.pk
                in template.excluded_agents.all()
                .only("pk")
                .values_list("pk", flat=True)
            ):
                continue

            # check if template is excluding desktops
            elif (
                self.monitoring_type == "workstation" and template.exclude_workstations
            ):
                continue

            # check if template is excluding servers
            elif self.monitoring_type == "server" and template.exclude_servers:
                continue

            else:
                # save alert_template to agent cache field
                self.alert_template = template
                self.save()

                return template

        # no alert templates found or agent has been excluded
        self.alert_template = None
        self.save()

        return None

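For readers skimming the hunk above: the method builds a candidate list in precedence order (agent policy, site policy, site, client policy, client, global template, global policy) and returns the first active template that does not exclude the agent. A minimal standalone sketch of that "first active wins" selection, using a hypothetical helper name that is not part of this diff:

    def first_active(*candidates):
        # return the first truthy template whose is_active flag is set
        return next((t for t in candidates if t and t.is_active), None)
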
    def generate_checks_from_policies(self):
        from automation.models import Policy

        # Clear agent checks that have overriden_by_policy set
        self.agentchecks.update(overriden_by_policy=False)
        self.agentchecks.update(overriden_by_policy=False)  # type: ignore

        # Generate checks based on policies
        Policy.generate_policy_checks(self)
@@ -420,7 +626,7 @@ class Agent(BaseAuditModel):
        except Exception:
            return "err"

    async def nats_cmd(self, data, timeout=30, wait=True):
    async def nats_cmd(self, data: dict, timeout: int = 30, wait: bool = True):
        nc = NATS()
        options = {
            "servers": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
@@ -442,7 +648,7 @@ class Agent(BaseAuditModel):
        except ErrTimeout:
            ret = "timeout"
        else:
            ret = msgpack.loads(msg.data)
            ret = msgpack.loads(msg.data)  # type: ignore

        await nc.close()
        return ret
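nats_cmd is a coroutine, so callers elsewhere in this changeset drive it with asyncio.run; an illustrative call (the agent object and timeout value are just examples, matching the pattern used in the tasks below):

    r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=2))
    if r == "pong":
        pass  # the agent responded over NATS
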
@@ -464,12 +670,12 @@ class Agent(BaseAuditModel):
    def delete_superseded_updates(self):
        try:
            pks = []  # list of pks to delete
            kbs = list(self.winupdates.values_list("kb", flat=True))
            kbs = list(self.winupdates.values_list("kb", flat=True))  # type: ignore
            d = Counter(kbs)
            dupes = [k for k, v in d.items() if v > 1]

            for dupe in dupes:
                titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True)
                titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True)  # type: ignore
                # extract the version from the title and sort from oldest to newest
                # skip if no version info is available therefore nothing to parse
                try:
@@ -482,24 +688,24 @@ class Agent(BaseAuditModel):
                    continue
                # append all but the latest version to our list of pks to delete
                for ver in sorted_vers[:-1]:
                    q = self.winupdates.filter(kb=dupe).filter(title__contains=ver)
                    q = self.winupdates.filter(kb=dupe).filter(title__contains=ver)  # type: ignore
                    pks.append(q.first().pk)

            pks = list(set(pks))
            self.winupdates.filter(pk__in=pks).delete()
            self.winupdates.filter(pk__in=pks).delete()  # type: ignore
        except:
            pass

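The dedupe above keys each Windows update on its KB number and keeps only the newest title per KB; the Counter step in isolation looks like this (sample KB values invented for illustration):

    from collections import Counter

    kbs = ["KB5001", "KB5001", "KB5002"]
    dupes = [k for k, v in Counter(kbs).items() if v > 1]  # -> ["KB5001"]
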
    # define how the agent should handle pending actions
    def handle_pending_actions(self):
        pending_actions = self.pendingactions.filter(status="pending")
        pending_actions = self.pendingactions.filter(status="pending")  # type: ignore

        for action in pending_actions:
            if action.action_type == "taskaction":
                from autotasks.tasks import (
                    create_win_task_schedule,
                    enable_or_disable_win_task,
                    delete_win_task_schedule,
                    enable_or_disable_win_task,
                )

                task_id = action.details["task_id"]
@@ -516,42 +722,38 @@ class Agent(BaseAuditModel):
    # for clearing duplicate pending actions on agent
    def remove_matching_pending_task_actions(self, task_id):
        # remove any other pending actions on agent with same task_id
        for action in self.pendingactions.exclude(status="completed"):
        for action in self.pendingactions.filter(action_type="taskaction").exclude(status="completed"):  # type: ignore
            if action.details["task_id"] == task_id:
                action.delete()


class AgentOutage(models.Model):
    agent = models.ForeignKey(
        Agent,
        related_name="agentoutages",
        null=True,
        blank=True,
        on_delete=models.CASCADE,
    def should_create_alert(self, alert_template=None):
        return (
            self.overdue_dashboard_alert
            or self.overdue_email_alert
            or self.overdue_text_alert
            or (
                alert_template
                and (
                    alert_template.agent_always_alert
                    or alert_template.agent_always_email
                    or alert_template.agent_always_text
                )
            )
        )
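A usage sketch for should_create_alert above; the call into Alert.handle_alert_failure mirrors what this changeset does elsewhere in tasks.py, and the surrounding wiring here is illustrative only:

    if agent.should_create_alert(alert_template=agent.alert_template):
        Alert.handle_alert_failure(agent)
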
    outage_time = models.DateTimeField(auto_now_add=True)
    recovery_time = models.DateTimeField(null=True, blank=True)
    outage_email_sent = models.BooleanField(default=False)
    outage_sms_sent = models.BooleanField(default=False)
    recovery_email_sent = models.BooleanField(default=False)
    recovery_sms_sent = models.BooleanField(default=False)

    @property
    def is_active(self):
        return False if self.recovery_time else True

    def send_outage_email(self):
        from core.models import CoreSettings

        CORE = CoreSettings.objects.first()
        CORE.send_mail(
            f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue",
            f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
            (
                f"Data has not been received from client {self.agent.client.name}, "
                f"site {self.agent.site.name}, "
                f"agent {self.agent.hostname} "
                f"Data has not been received from client {self.client.name}, "
                f"site {self.site.name}, "
                f"agent {self.hostname} "
                "within the expected time."
            ),
            alert_template=self.alert_template,
        )

    def send_recovery_email(self):
@@ -559,13 +761,14 @@ class AgentOutage(models.Model):

        CORE = CoreSettings.objects.first()
        CORE.send_mail(
            f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received",
            f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
            (
                f"Data has been received from client {self.agent.client.name}, "
                f"site {self.agent.site.name}, "
                f"agent {self.agent.hostname} "
                f"Data has been received from client {self.client.name}, "
                f"site {self.site.name}, "
                f"agent {self.hostname} "
                "after an interruption in data transmission."
            ),
            alert_template=self.alert_template,
        )

    def send_outage_sms(self):
@@ -573,7 +776,8 @@ class AgentOutage(models.Model):

        CORE = CoreSettings.objects.first()
        CORE.send_sms(
            f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue"
            f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
            alert_template=self.alert_template,
        )

    def send_recovery_sms(self):
@@ -581,12 +785,10 @@ class AgentOutage(models.Model):

        CORE = CoreSettings.objects.first()
        CORE.send_sms(
            f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received"
            f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
            alert_template=self.alert_template,
        )

    def __str__(self):
        return self.agent.hostname


RECOVERY_CHOICES = [
    ("salt", "Salt"),

@@ -1,13 +1,11 @@
import pytz

from rest_framework import serializers
from rest_framework.fields import ReadOnlyField

from clients.serializers import ClientSerializer
from winupdate.serializers import WinUpdatePolicySerializer

from .models import Agent, Note

from winupdate.serializers import WinUpdatePolicySerializer
from clients.serializers import ClientSerializer


class AgentSerializer(serializers.ModelSerializer):
    # for vue
@@ -34,6 +32,17 @@ class AgentSerializer(serializers.ModelSerializer):
    ]


class AgentOverdueActionSerializer(serializers.ModelSerializer):
    class Meta:
        model = Agent
        fields = [
            "pk",
            "overdue_email_alert",
            "overdue_text_alert",
            "overdue_dashboard_alert",
        ]


class AgentTableSerializer(serializers.ModelSerializer):
    patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
    pending_actions = serializers.SerializerMethodField()
@@ -44,6 +53,20 @@ class AgentTableSerializer(serializers.ModelSerializer):
    site_name = serializers.ReadOnlyField(source="site.name")
    logged_username = serializers.SerializerMethodField()
    italic = serializers.SerializerMethodField()
    policy = serializers.ReadOnlyField(source="policy.id")
    alert_template = serializers.SerializerMethodField()

    def get_alert_template(self, obj):

        if not obj.alert_template:
            return None
        else:
            return {
                "name": obj.alert_template.name,
                "always_email": obj.alert_template.agent_always_email,
                "always_text": obj.alert_template.agent_always_text,
                "always_alert": obj.alert_template.agent_always_alert,
            }

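For orientation, the SerializerMethodField above emits either None or a small dict; a representative payload (field values invented) looks like:

    {"name": "Default", "always_email": False, "always_text": False, "always_alert": True}
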
    def get_pending_actions(self, obj):
        return obj.pendingactions.filter(status="pending").count()
@@ -54,7 +77,7 @@ class AgentTableSerializer(serializers.ModelSerializer):
        else:
            agent_tz = self.context["default_tz"]

        return obj.last_seen.astimezone(agent_tz).timestamp()
        return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M")

    def get_logged_username(self, obj) -> str:
        if obj.logged_in_username == "None" and obj.status == "online":
@@ -71,6 +94,7 @@ class AgentTableSerializer(serializers.ModelSerializer):
        model = Agent
        fields = [
            "id",
            "alert_template",
            "hostname",
            "agent_id",
            "site_name",
@@ -83,12 +107,14 @@ class AgentTableSerializer(serializers.ModelSerializer):
            "status",
            "overdue_text_alert",
            "overdue_email_alert",
            "overdue_dashboard_alert",
            "last_seen",
            "boot_time",
            "checks",
            "maintenance_mode",
            "logged_username",
            "italic",
            "policy",
        ]
        depth = 2

@@ -114,10 +140,12 @@ class AgentEditSerializer(serializers.ModelSerializer):
            "timezone",
            "check_interval",
            "overdue_time",
            "offline_time",
            "overdue_text_alert",
            "overdue_email_alert",
            "all_timezones",
            "winupdatepolicy",
            "policy",
        ]


@@ -1,65 +1,37 @@
import asyncio
from loguru import logger
from time import sleep
import datetime as dt
import random
from packaging import version as pyver
from typing import List
from time import sleep
from typing import Union

from django.conf import settings
from scripts.models import Script
from django.utils import timezone as djangotime
from loguru import logger
from packaging import version as pyver

from tacticalrmm.celery import app
from agents.models import Agent, AgentOutage
from agents.models import Agent
from core.models import CoreSettings
from logs.models import PendingAction
from scripts.models import Script
from tacticalrmm.celery import app

logger.configure(**settings.LOG_CONFIG)


def _check_agent_service(pk: int) -> None:
    agent = Agent.objects.get(pk=pk)
    r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=2))
    if r == "pong":
        logger.info(
            f"Detected crashed tacticalagent service on {agent.hostname}, attempting recovery"
        )
        data = {"func": "recover", "payload": {"mode": "tacagent"}}
        asyncio.run(agent.nats_cmd(data, wait=False))


def _check_in_full(pk: int) -> None:
    agent = Agent.objects.get(pk=pk)
    asyncio.run(agent.nats_cmd({"func": "checkinfull"}, wait=False))


@app.task
def check_in_task() -> None:
    q = Agent.objects.only("pk", "version")
    agents: List[int] = [
        i.pk for i in q if pyver.parse(i.version) == pyver.parse("1.1.12")
    ]
    chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
    for chunk in chunks:
        for pk in chunk:
            _check_in_full(pk)
            sleep(0.1)
        rand = random.randint(3, 7)
        sleep(rand)


@app.task
def monitor_agents_task() -> None:
    q = Agent.objects.only("pk", "version", "last_seen", "overdue_time")
    agents: List[int] = [i.pk for i in q if i.has_nats and i.status != "online"]
    for agent in agents:
        _check_agent_service(agent)

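check_in_task above throttles fan-out by slicing the agent list into fixed-size chunks; a tiny self-contained illustration of that generator expression (the numbers are arbitrary):

    agents = list(range(7))
    chunks = (agents[i : i + 3] for i in range(0, len(agents), 3))
    # yields [0, 1, 2], [3, 4, 5], [6]
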
def agent_update(pk: int) -> str:
    agent = Agent.objects.get(pk=pk)

    if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
        logger.warning(
            f"{agent.hostname} v{agent.version} is running an unsupported version. Refusing to auto update."
        )
        return "not supported"

    # skip if we can't determine the arch
    if agent.arch is None:
        logger.warning(f"Unable to determine arch on {agent.hostname}. Skipping.")
        logger.warning(
            f"Unable to determine arch on {agent.hostname}. Skipping agent update."
        )
        return "noarch"

    # removed sqlite in 1.4.0 to get rid of cgo dependency
@@ -75,18 +47,12 @@ def agent_update(pk: int) -> str:
    )
    url = f"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/{inno}"

    if agent.has_nats:
        if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
            if agent.pendingactions.filter(
                action_type="agentupdate", status="pending"
            ).exists():
                action = agent.pendingactions.filter(
    agent.pendingactions.filter(
                    action_type="agentupdate", status="pending"
                ).last()
                if pyver.parse(action.details["version"]) < pyver.parse(version):
                    action.delete()
                else:
                    return "pending"
    ).delete()

    PendingAction.objects.create(
        agent=agent,
@@ -97,7 +63,7 @@ def agent_update(pk: int) -> str:
            "inno": inno,
        },
    )
    else:

    nats_data = {
        "func": "agentupdate",
        "payload": {
@@ -107,19 +73,12 @@ def agent_update(pk: int) -> str:
        },
    }
    asyncio.run(agent.nats_cmd(nats_data, wait=False))

    return "created"

    return "not supported"


@app.task
def send_agent_update_task(pks: List[int], version: str) -> None:
    q = Agent.objects.filter(pk__in=pks)
    agents: List[int] = [
        i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)
    ]
    chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
def send_agent_update_task(pks: list[int]) -> None:
    chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
    for chunk in chunks:
        for pk in chunk:
            agent_update(pk)
@@ -134,7 +93,7 @@ def auto_self_agent_update_task() -> None:
        return

    q = Agent.objects.only("pk", "version")
    pks: List[int] = [
    pks: list[int] = [
        i.pk
        for i in q
        if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
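When the queued path above is taken, the PendingAction row stores the update parameters in its details JSON; a representative payload, mirroring the values asserted later in the tests, would be:

    {"url": "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
     "version": "1.3.0",
     "inno": "winagent-v1.3.0.exe"}
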
@@ -149,127 +108,111 @@ def auto_self_agent_update_task() -> None:


@app.task
def get_wmi_task():
    agents = Agent.objects.only("pk", "version", "last_seen", "overdue_time")
    online = [
        i
        for i in agents
        if pyver.parse(i.version) >= pyver.parse("1.2.0") and i.status == "online"
    ]
    chunks = (online[i : i + 50] for i in range(0, len(online), 50))
    for chunk in chunks:
        for agent in chunk:
            asyncio.run(agent.nats_cmd({"func": "wmi"}, wait=False))
            sleep(0.1)
        rand = random.randint(3, 7)
        sleep(rand)
def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None) -> str:
    from alerts.models import Alert

    alert = Alert.objects.get(pk=pk)

@app.task
def sync_sysinfo_task():
    agents = Agent.objects.only("pk", "version", "last_seen", "overdue_time")
    online = [
        i
        for i in agents
        if pyver.parse(i.version) >= pyver.parse("1.1.3")
        and pyver.parse(i.version) <= pyver.parse("1.1.12")
        and i.status == "online"
    ]

    chunks = (online[i : i + 50] for i in range(0, len(online), 50))
    for chunk in chunks:
        for agent in chunk:
            asyncio.run(agent.nats_cmd({"func": "sync"}, wait=False))
            sleep(0.1)
        rand = random.randint(3, 7)
        sleep(rand)


@app.task
def agent_outage_email_task(pk):
    if not alert.email_sent:
        sleep(random.randint(1, 15))
        outage = AgentOutage.objects.get(pk=pk)
        outage.send_outage_email()
        outage.outage_email_sent = True
        outage.save(update_fields=["outage_email_sent"])
        alert.agent.send_outage_email()
        alert.email_sent = djangotime.now()
        alert.save(update_fields=["email_sent"])
    else:
        if alert_interval:
            # send an email only if the last email sent is older than alert interval
            delta = djangotime.now() - dt.timedelta(days=alert_interval)
            if alert.email_sent < delta:
                sleep(random.randint(1, 10))
                alert.agent.send_outage_email()
                alert.email_sent = djangotime.now()
                alert.save(update_fields=["email_sent"])

    return "ok"


@app.task
def agent_recovery_email_task(pk):
def agent_recovery_email_task(pk: int) -> str:
    from alerts.models import Alert

    sleep(random.randint(1, 15))
    outage = AgentOutage.objects.get(pk=pk)
    outage.send_recovery_email()
    outage.recovery_email_sent = True
    outage.save(update_fields=["recovery_email_sent"])
    alert = Alert.objects.get(pk=pk)
    alert.agent.send_recovery_email()
    alert.resolved_email_sent = djangotime.now()
    alert.save(update_fields=["resolved_email_sent"])

    return "ok"


@app.task
def agent_outage_sms_task(pk):
def agent_outage_sms_task(pk: int, alert_interval: Union[float, None] = None) -> str:
    from alerts.models import Alert

    alert = Alert.objects.get(pk=pk)

    if not alert.sms_sent:
        sleep(random.randint(1, 15))
        alert.agent.send_outage_sms()
        alert.sms_sent = djangotime.now()
        alert.save(update_fields=["sms_sent"])
    else:
        if alert_interval:
            # send an sms only if the last sms sent is older than alert interval
            delta = djangotime.now() - dt.timedelta(days=alert_interval)
            if alert.sms_sent < delta:
                sleep(random.randint(1, 10))
                alert.agent.send_outage_sms()
                alert.sms_sent = djangotime.now()
                alert.save(update_fields=["sms_sent"])

    return "ok"


@app.task
def agent_recovery_sms_task(pk: int) -> str:
    from alerts.models import Alert

    sleep(random.randint(1, 3))
    outage = AgentOutage.objects.get(pk=pk)
    outage.send_outage_sms()
    outage.outage_sms_sent = True
    outage.save(update_fields=["outage_sms_sent"])
    alert = Alert.objects.get(pk=pk)
    alert.agent.send_recovery_sms()
    alert.resolved_sms_sent = djangotime.now()
    alert.save(update_fields=["resolved_sms_sent"])

    return "ok"

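Both outage tasks above resend only when the previous notification is older than the template's alert interval; the comparison in isolation (the interval is given in days, values illustrative):

    delta = djangotime.now() - dt.timedelta(days=alert_interval)
    resend = alert.email_sent < delta  # True once the last email is older than the interval
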
@app.task
def agent_recovery_sms_task(pk):
    sleep(random.randint(1, 3))
    outage = AgentOutage.objects.get(pk=pk)
    outage.send_recovery_sms()
    outage.recovery_sms_sent = True
    outage.save(update_fields=["recovery_sms_sent"])
def agent_outages_task() -> None:
    from alerts.models import Alert


@app.task
def agent_outages_task():
    agents = Agent.objects.only(
        "pk", "last_seen", "overdue_time", "overdue_email_alert", "overdue_text_alert"
        "pk",
        "last_seen",
        "offline_time",
        "overdue_time",
        "overdue_email_alert",
        "overdue_text_alert",
        "overdue_dashboard_alert",
    )

    for agent in agents:
        if agent.overdue_email_alert or agent.overdue_text_alert:
            if agent.status == "overdue":
                outages = AgentOutage.objects.filter(agent=agent)
                if outages and outages.last().is_active:
                    continue

                outage = AgentOutage(agent=agent)
                outage.save()

                # add a null check history to allow gaps in graph
                for check in agent.agentchecks.all():
                    check.add_check_history(None)

                if agent.overdue_email_alert and not agent.maintenance_mode:
                    agent_outage_email_task.delay(pk=outage.pk)

                if agent.overdue_text_alert and not agent.maintenance_mode:
                    agent_outage_sms_task.delay(pk=outage.pk)


@app.task
def handle_agent_recovery_task(pk: int) -> None:
    sleep(10)
    from agents.models import RecoveryAction

    action = RecoveryAction.objects.get(pk=pk)
    if action.mode == "command":
        data = {"func": "recoverycmd", "recoverycommand": action.command}
    else:
        data = {"func": "recover", "payload": {"mode": action.mode}}

    asyncio.run(action.agent.nats_cmd(data, wait=False))
            Alert.handle_alert_failure(agent)


@app.task
def run_script_email_results_task(
    agentpk: int, scriptpk: int, nats_timeout: int, nats_data: dict, emails: List[str]
    agentpk: int,
    scriptpk: int,
    nats_timeout: int,
    emails: list[str],
    args: list[str] = [],
):
    agent = Agent.objects.get(pk=agentpk)
    script = Script.objects.get(pk=scriptpk)
    nats_data["func"] = "runscriptfull"
    r = asyncio.run(agent.nats_cmd(nats_data, timeout=nats_timeout))
    r = agent.run_script(
        scriptpk=script.pk, args=args, full=True, timeout=nats_timeout, wait=True
    )
    if r == "timeout":
        logger.error(f"{agent.hostname} timed out running script.")
        return
@@ -309,18 +252,3 @@ def run_script_email_results_task(
        server.quit()
    except Exception as e:
        logger.error(e)


@app.task
def remove_salt_task() -> None:
    if hasattr(settings, "KEEP_SALT") and settings.KEEP_SALT:
        return

    q = Agent.objects.only("pk", "version")
    agents = [i for i in q if pyver.parse(i.version) >= pyver.parse("1.3.0")]
    chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
    for chunk in chunks:
        for agent in chunk:
            asyncio.run(agent.nats_cmd({"func": "removesalt"}, wait=False))
            sleep(0.1)
        sleep(4)

@@ -1,20 +1,77 @@
import json
import os
from itertools import cycle
from unittest.mock import patch

from model_bakery import baker
from itertools import cycle

from django.test import TestCase, override_settings
from django.conf import settings
from django.utils import timezone as djangotime
from logs.models import PendingAction
from model_bakery import baker
from packaging import version as pyver

from logs.models import PendingAction
from tacticalrmm.test import TacticalTestCase
from .serializers import AgentSerializer
from winupdate.serializers import WinUpdatePolicySerializer
from .models import Agent
from winupdate.models import WinUpdatePolicy
from winupdate.serializers import WinUpdatePolicySerializer

from .models import Agent
from .serializers import AgentSerializer
from .tasks import auto_self_agent_update_task


class TestAgentsList(TacticalTestCase):
    def setUp(self):
        self.authenticate()
        self.setup_coresettings()

    def test_agents_list(self):
        url = "/agents/listagents/"

        # 36 total agents
        company1 = baker.make("clients.Client")
        company2 = baker.make("clients.Client")
        site1 = baker.make("clients.Site", client=company1)
        site2 = baker.make("clients.Site", client=company1)
        site3 = baker.make("clients.Site", client=company2)

        baker.make_recipe(
            "agents.online_agent", site=site1, monitoring_type="server", _quantity=15
        )
        baker.make_recipe(
            "agents.online_agent",
            site=site2,
            monitoring_type="workstation",
            _quantity=10,
        )
        baker.make_recipe(
            "agents.online_agent",
            site=site3,
            monitoring_type="server",
            _quantity=4,
        )
        baker.make_recipe(
            "agents.online_agent",
            site=site3,
            monitoring_type="workstation",
            _quantity=7,
        )

        # test all agents
        r = self.client.patch(url, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(len(r.data), 36)  # type: ignore

        # test client1
        data = {"clientPK": company1.pk}  # type: ignore
        r = self.client.patch(url, data, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(len(r.data), 25)  # type: ignore

        # test site3
        data = {"sitePK": site3.pk}  # type: ignore
        r = self.client.patch(url, data, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(len(r.data), 11)  # type: ignore

        self.check_not_authenticated("patch", url)

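The fixtures above lean on model_bakery's _quantity keyword to mass-create agents from a recipe; a minimal sketch of the same pattern (recipe name taken from the test, counts arbitrary):

    site = baker.make("clients.Site")
    baker.make_recipe("agents.online_agent", site=site, monitoring_type="server", _quantity=3)
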
class TestAgentViews(TacticalTestCase):
@@ -64,12 +121,34 @@ class TestAgentViews(TacticalTestCase):
    @patch("agents.tasks.send_agent_update_task.delay")
    def test_update_agents(self, mock_task):
        url = "/agents/updateagents/"
        data = {"pks": [1, 2, 3, 5, 10], "version": "0.11.1"}
        baker.make_recipe(
            "agents.agent",
            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
            version=settings.LATEST_AGENT_VER,
            _quantity=15,
        )
        baker.make_recipe(
            "agents.agent",
            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
            version="1.3.0",
            _quantity=15,
        )

        pks: list[int] = list(
            Agent.objects.only("pk", "version").values_list("pk", flat=True)
        )

        data = {"pks": pks}
        expected: list[int] = [
            i.pk
            for i in Agent.objects.only("pk", "version")
            if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
        ]

        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)

        mock_task.assert_called_with(pks=data["pks"], version=data["version"])
        mock_task.assert_called_with(pks=expected)

        self.check_not_authenticated("post", url)

@@ -119,11 +198,6 @@ class TestAgentViews(TacticalTestCase):

    @patch("agents.models.Agent.nats_cmd")
    def test_get_processes(self, mock_ret):
        agent_old = baker.make_recipe("agents.online_agent", version="1.1.12")
        url_old = f"/agents/{agent_old.pk}/getprocs/"
        r = self.client.get(url_old)
        self.assertEqual(r.status_code, 400)

        agent = baker.make_recipe("agents.online_agent", version="1.2.0")
        url = f"/agents/{agent.pk}/getprocs/"

@@ -162,18 +236,44 @@ class TestAgentViews(TacticalTestCase):
        self.check_not_authenticated("get", url)

    @patch("agents.models.Agent.nats_cmd")
    def test_get_event_log(self, mock_ret):
        url = f"/agents/{self.agent.pk}/geteventlog/Application/30/"
    def test_get_event_log(self, nats_cmd):
        url = f"/agents/{self.agent.pk}/geteventlog/Application/22/"

        with open(
            os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/appeventlog.json")
        ) as f:
            mock_ret.return_value = json.load(f)
            nats_cmd.return_value = json.load(f)

        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        nats_cmd.assert_called_with(
            {
                "func": "eventlog",
                "timeout": 30,
                "payload": {
                    "logname": "Application",
                    "days": str(22),
                },
            },
            timeout=32,
        )

        mock_ret.return_value = "timeout"
        url = f"/agents/{self.agent.pk}/geteventlog/Security/6/"
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        nats_cmd.assert_called_with(
            {
                "func": "eventlog",
                "timeout": 180,
                "payload": {
                    "logname": "Security",
                    "days": str(6),
                },
            },
            timeout=182,
        )

        nats_cmd.return_value = "timeout"
        r = self.client.get(url)
        self.assertEqual(r.status_code, 400)

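The assertions above encode the convention, visible again in the views changes further down, that the NATS payload carries the worker-side timeout while nats_cmd itself gets that value plus a two-second cushion; schematically (values from the test):

    payload_timeout = 180                       # Security log
    nats_cmd(data, timeout=payload_timeout + 2)
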
@@ -208,7 +308,7 @@ class TestAgentViews(TacticalTestCase):
        mock_ret.return_value = "nt authority\system"
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertIsInstance(r.data, str)
        self.assertIsInstance(r.data, str)  # type: ignore

        mock_ret.return_value = "timeout"
        r = self.client.post(url, data, format="json")
@@ -228,15 +328,16 @@ class TestAgentViews(TacticalTestCase):
        nats_cmd.return_value = "ok"
        r = self.client.patch(url, data, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")
        self.assertEqual(r.data["agent"], self.agent.hostname)
        self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")  # type: ignore
        self.assertEqual(r.data["agent"], self.agent.hostname)  # type: ignore

        nats_data = {
            "func": "schedtask",
            "schedtaskpayload": {
                "type": "schedreboot",
                "deleteafter": True,
                "trigger": "once",
                "name": r.data["task_name"],
                "name": r.data["task_name"],  # type: ignore
                "year": 2025,
                "month": "August",
                "day": 29,
@@ -257,7 +358,7 @@ class TestAgentViews(TacticalTestCase):
        r = self.client.patch(url, data_invalid, format="json")

        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.data, "Invalid date")
        self.assertEqual(r.data, "Invalid date")  # type: ignore

        self.check_not_authenticated("patch", url)

@@ -268,8 +369,8 @@ class TestAgentViews(TacticalTestCase):

        site = baker.make("clients.Site")
        data = {
            "client": site.client.id,
            "site": site.id,
            "client": site.client.id,  # type: ignore
            "site": site.id,  # type: ignore
            "arch": "64",
            "expires": 23,
            "installMethod": "exe",
@@ -308,7 +409,6 @@ class TestAgentViews(TacticalTestCase):
        r = self.client.post(url, data, format="json")
        self.assertIn("rdp", r.json()["cmd"])
        self.assertNotIn("power", r.json()["cmd"])
        self.assertNotIn("ping", r.json()["cmd"])

        data.update({"ping": 1, "power": 1})
        r = self.client.post(url, data, format="json")
@@ -317,50 +417,69 @@ class TestAgentViews(TacticalTestCase):

        self.check_not_authenticated("post", url)

    def test_recover(self):
    @patch("agents.models.Agent.nats_cmd")
    def test_recover(self, nats_cmd):
        from agents.models import RecoveryAction

        self.agent.version = "0.11.1"
        self.agent.save(update_fields=["version"])
        RecoveryAction.objects.all().delete()
        url = "/agents/recover/"
        data = {"pk": self.agent.pk, "cmd": None, "mode": "mesh"}
        agent = baker.make_recipe("agents.online_agent")

        # test mesh realtime
        data = {"pk": agent.pk, "cmd": None, "mode": "mesh"}
        nats_cmd.return_value = "ok"
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(RecoveryAction.objects.count(), 0)
        nats_cmd.assert_called_with(
            {"func": "recover", "payload": {"mode": "mesh"}}, timeout=10
        )
        nats_cmd.reset_mock()

        data["mode"] = "mesh"
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 400)
        self.assertIn("pending", r.json())

        RecoveryAction.objects.all().delete()
        data["mode"] = "command"
        data["cmd"] = "ipconfig /flushdns"
        # test mesh with agent rpc not working
        data = {"pk": agent.pk, "cmd": None, "mode": "mesh"}
        nats_cmd.return_value = "timeout"
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)

        RecoveryAction.objects.all().delete()
        data["cmd"] = None
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 400)

        self.assertEqual(RecoveryAction.objects.count(), 1)
        mesh_recovery = RecoveryAction.objects.first()
        self.assertEqual(mesh_recovery.mode, "mesh")
        nats_cmd.reset_mock()
        RecoveryAction.objects.all().delete()

        self.agent.version = "0.9.4"
        self.agent.save(update_fields=["version"])
        data["mode"] = "mesh"
        # test tacagent realtime
        data = {"pk": agent.pk, "cmd": None, "mode": "tacagent"}
        nats_cmd.return_value = "ok"
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 400)
        self.assertIn("0.9.5", r.json())

        self.check_not_authenticated("post", url)

    def test_agents_list(self):
        url = "/agents/listagents/"

        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(RecoveryAction.objects.count(), 0)
        nats_cmd.assert_called_with(
            {"func": "recover", "payload": {"mode": "tacagent"}}, timeout=10
        )
        nats_cmd.reset_mock()

        self.check_not_authenticated("get", url)
        # test tacagent with rpc not working
        data = {"pk": agent.pk, "cmd": None, "mode": "tacagent"}
        nats_cmd.return_value = "timeout"
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 400)
        self.assertEqual(RecoveryAction.objects.count(), 0)
        nats_cmd.reset_mock()

        # test shell cmd without command
        data = {"pk": agent.pk, "cmd": None, "mode": "command"}
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 400)
        self.assertEqual(RecoveryAction.objects.count(), 0)

        # test shell cmd
        data = {"pk": agent.pk, "cmd": "shutdown /r /t 10 /f", "mode": "command"}
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(RecoveryAction.objects.count(), 1)
        cmd_recovery = RecoveryAction.objects.first()
        self.assertEqual(cmd_recovery.mode, "command")
        self.assertEqual(cmd_recovery.command, "shutdown /r /t 10 /f")

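The recover endpoint exercised above distinguishes three modes (mesh, tacagent, command): realtime attempts go straight over NATS, and only a timeout or a shell command ends up persisting a RecoveryAction row. A schematic request body for the shell path (the pk value is arbitrary, the rest matches the test data):

    {"pk": 1, "cmd": "shutdown /r /t 10 /f", "mode": "command"}
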
    def test_agents_agent_detail(self):
        url = f"/agents/{self.agent.pk}/agentdetail/"
@@ -378,9 +497,10 @@ class TestAgentViews(TacticalTestCase):

        edit = {
            "id": self.agent.pk,
            "site": site.id,
            "site": site.id,  # type: ignore
            "monitoring_type": "workstation",
            "description": "asjdk234andasd",
            "offline_time": 4,
            "overdue_time": 300,
            "check_interval": 60,
            "overdue_email_alert": True,
@@ -408,7 +528,7 @@ class TestAgentViews(TacticalTestCase):

        agent = Agent.objects.get(pk=self.agent.pk)
        data = AgentSerializer(agent).data
        self.assertEqual(data["site"], site.id)
        self.assertEqual(data["site"], site.id)  # type: ignore

        policy = WinUpdatePolicy.objects.get(agent=self.agent)
        data = WinUpdatePolicySerializer(policy).data
@@ -426,21 +546,21 @@ class TestAgentViews(TacticalTestCase):
        # TODO
        # decode the cookie

        self.assertIn("&viewmode=13", r.data["file"])
        self.assertIn("&viewmode=12", r.data["terminal"])
        self.assertIn("&viewmode=11", r.data["control"])
        self.assertIn("&viewmode=13", r.data["file"])  # type: ignore
        self.assertIn("&viewmode=12", r.data["terminal"])  # type: ignore
        self.assertIn("&viewmode=11", r.data["control"])  # type: ignore

        self.assertIn("&gotonode=", r.data["file"])
        self.assertIn("&gotonode=", r.data["terminal"])
        self.assertIn("&gotonode=", r.data["control"])
        self.assertIn("&gotonode=", r.data["file"])  # type: ignore
        self.assertIn("&gotonode=", r.data["terminal"])  # type: ignore
        self.assertIn("&gotonode=", r.data["control"])  # type: ignore

        self.assertIn("?login=", r.data["file"])
        self.assertIn("?login=", r.data["terminal"])
        self.assertIn("?login=", r.data["control"])
        self.assertIn("?login=", r.data["file"])  # type: ignore
        self.assertIn("?login=", r.data["terminal"])  # type: ignore
        self.assertIn("?login=", r.data["control"])  # type: ignore

        self.assertEqual(self.agent.hostname, r.data["hostname"])
        self.assertEqual(self.agent.client.name, r.data["client"])
        self.assertEqual(self.agent.site.name, r.data["site"])
        self.assertEqual(self.agent.hostname, r.data["hostname"])  # type: ignore
        self.assertEqual(self.agent.client.name, r.data["client"])  # type: ignore
        self.assertEqual(self.agent.site.name, r.data["site"])  # type: ignore

        self.assertEqual(r.status_code, 200)

@@ -450,70 +570,22 @@ class TestAgentViews(TacticalTestCase):

        self.check_not_authenticated("get", url)

    def test_by_client(self):
        url = f"/agents/byclient/{self.agent.client.id}/"

        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertTrue(r.data)

        url = f"/agents/byclient/500/"
        r = self.client.get(url)
        self.assertFalse(r.data)  # returns empty list

        self.check_not_authenticated("get", url)

    def test_by_site(self):
        url = f"/agents/bysite/{self.agent.site.id}/"

        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertTrue(r.data)

        url = f"/agents/bysite/500/"
        r = self.client.get(url)
        self.assertEqual(r.data, [])

        self.check_not_authenticated("get", url)

    def test_overdue_action(self):
        url = "/agents/overdueaction/"

        payload = {"pk": self.agent.pk, "alertType": "email", "action": "enabled"}
        payload = {"pk": self.agent.pk, "overdue_email_alert": True}
        r = self.client.post(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        agent = Agent.objects.get(pk=self.agent.pk)
        self.assertTrue(agent.overdue_email_alert)
        self.assertEqual(self.agent.hostname, r.data)
        self.assertEqual(self.agent.hostname, r.data)  # type: ignore

        payload.update({"alertType": "email", "action": "disabled"})
        r = self.client.post(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        agent = Agent.objects.get(pk=self.agent.pk)
        self.assertFalse(agent.overdue_email_alert)
        self.assertEqual(self.agent.hostname, r.data)

        payload.update({"alertType": "text", "action": "enabled"})
        r = self.client.post(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        agent = Agent.objects.get(pk=self.agent.pk)
        self.assertTrue(agent.overdue_text_alert)
        self.assertEqual(self.agent.hostname, r.data)

        payload.update({"alertType": "text", "action": "disabled"})
        payload = {"pk": self.agent.pk, "overdue_text_alert": False}
        r = self.client.post(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        agent = Agent.objects.get(pk=self.agent.pk)
        self.assertFalse(agent.overdue_text_alert)
        self.assertEqual(self.agent.hostname, r.data)

        payload.update({"alertType": "email", "action": "523423"})
        r = self.client.post(url, payload, format="json")
        self.assertEqual(r.status_code, 400)

        payload.update({"alertType": "text", "action": "asdasd3434asdasd"})
        r = self.client.post(url, payload, format="json")
        self.assertEqual(r.status_code, 400)
        self.assertEqual(self.agent.hostname, r.data)  # type: ignore

        self.check_not_authenticated("post", url)

@@ -657,7 +729,7 @@ class TestAgentViews(TacticalTestCase):
        nats_cmd.return_value = "ok"
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertIn(self.agent.hostname, r.data)
        self.assertIn(self.agent.hostname, r.data)  # type: ignore
        nats_cmd.assert_called_with(
            {"func": "recover", "payload": {"mode": "mesh"}}, timeout=45
        )
@@ -672,10 +744,82 @@ class TestAgentViews(TacticalTestCase):

        self.check_not_authenticated("get", url)

    @patch("agents.tasks.run_script_email_results_task.delay")
    @patch("agents.models.Agent.run_script")
    def test_run_script(self, run_script, email_task):
        run_script.return_value = "ok"
        url = "/agents/runscript/"
        script = baker.make_recipe("scripts.script")

        # test wait
        data = {
            "pk": self.agent.pk,
            "scriptPK": script.pk,
            "output": "wait",
            "args": [],
            "timeout": 15,
        }

        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)
        run_script.assert_called_with(
            scriptpk=script.pk, args=[], timeout=18, wait=True
        )
        run_script.reset_mock()

        # test email default
        data = {
            "pk": self.agent.pk,
            "scriptPK": script.pk,
            "output": "email",
            "args": ["abc", "123"],
            "timeout": 15,
            "emailmode": "default",
            "emails": ["admin@example.com", "bob@example.com"],
        }
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)
        email_task.assert_called_with(
            agentpk=self.agent.pk,
            scriptpk=script.pk,
            nats_timeout=18,
            emails=[],
            args=["abc", "123"],
        )
        email_task.reset_mock()

        # test email overrides
        data["emailmode"] = "custom"
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)
        email_task.assert_called_with(
            agentpk=self.agent.pk,
            scriptpk=script.pk,
            nats_timeout=18,
            emails=["admin@example.com", "bob@example.com"],
            args=["abc", "123"],
        )

        # test fire and forget
        data = {
            "pk": self.agent.pk,
            "scriptPK": script.pk,
            "output": "forget",
            "args": ["hello", "world"],
            "timeout": 22,
        }

        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)
        run_script.assert_called_with(
            scriptpk=script.pk, args=["hello", "world"], timeout=25
        )


class TestAgentViewsNew(TacticalTestCase):
    def setUp(self):
        self.authenticate()
        self.setup_coresettings()

    def test_agent_counts(self):
        url = "/agents/agent_counts/"
@@ -686,15 +830,12 @@ class TestAgentViewsNew(TacticalTestCase):
            monitoring_type=cycle(["server", "workstation"]),
            _quantity=6,
        )
        agents = baker.make_recipe(
        baker.make_recipe(
            "agents.overdue_agent",
            monitoring_type=cycle(["server", "workstation"]),
            _quantity=6,
        )

        # make an AgentOutage for every overdue agent
        baker.make("agents.AgentOutage", agent=cycle(agents), _quantity=6)

        # returned data should be this
        data = {
            "total_server_count": 6,
@@ -705,7 +846,7 @@ class TestAgentViewsNew(TacticalTestCase):

        r = self.client.post(url, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.data, data)
        self.assertEqual(r.data, data)  # type: ignore

        self.check_not_authenticated("post", url)

@@ -717,14 +858,14 @@ class TestAgentViewsNew(TacticalTestCase):
        agent = baker.make_recipe("agents.agent", site=site)

        # Test client toggle maintenance mode
        data = {"type": "Client", "id": site.client.id, "action": True}
        data = {"type": "Client", "id": site.client.id, "action": True}  # type: ignore

        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertTrue(Agent.objects.get(pk=agent.pk).maintenance_mode)

        # Test site toggle maintenance mode
        data = {"type": "Site", "id": site.id, "action": False}
        data = {"type": "Site", "id": site.id, "action": False}  # type: ignore

        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)
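The run_script assertions above rely on the view padding the requested timeout by three seconds before handing it to the agent call (15 becomes 18, 22 becomes 25); as a sketch:

    nats_timeout = int(request_timeout) + 3  # request_timeout comes from the POST body
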
@@ -758,26 +899,28 @@ class TestAgentTasks(TacticalTestCase):
        agent_noarch = baker.make_recipe(
            "agents.agent",
            operating_system="Error getting OS",
            version="1.1.11",
            version=settings.LATEST_AGENT_VER,
        )
        r = agent_update(agent_noarch.pk)
        self.assertEqual(r, "noarch")
        self.assertEqual(
            PendingAction.objects.filter(
                agent=agent_noarch, action_type="agentupdate"
            ).count(),
            0,
        )

        agent64_111 = baker.make_recipe(
        agent_1111 = baker.make_recipe(
            "agents.agent",
            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
            version="1.1.11",
        )
        r = agent_update(agent_1111.pk)
        self.assertEqual(r, "not supported")

        r = agent_update(agent64_111.pk)
        agent64_1112 = baker.make_recipe(
            "agents.agent",
            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
            version="1.1.12",
        )

        r = agent_update(agent64_1112.pk)
        self.assertEqual(r, "created")
        action = PendingAction.objects.get(agent__pk=agent64_111.pk)
        action = PendingAction.objects.get(agent__pk=agent64_1112.pk)
        self.assertEqual(action.action_type, "agentupdate")
        self.assertEqual(action.status, "pending")
        self.assertEqual(
@@ -786,6 +929,17 @@ class TestAgentTasks(TacticalTestCase):
        )
        self.assertEqual(action.details["inno"], "winagent-v1.3.0.exe")
        self.assertEqual(action.details["version"], "1.3.0")
        nats_cmd.assert_called_with(
            {
                "func": "agentupdate",
                "payload": {
                    "url": "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
                    "version": "1.3.0",
                    "inno": "winagent-v1.3.0.exe",
                },
            },
            wait=False,
        )

        agent_64_130 = baker.make_recipe(
            "agents.agent",
@@ -806,128 +960,34 @@ class TestAgentTasks(TacticalTestCase):
            },
            wait=False,
        )
        action = PendingAction.objects.get(agent__pk=agent_64_130.pk)
        self.assertEqual(action.action_type, "agentupdate")
        self.assertEqual(action.status, "pending")

        agent64_old = baker.make_recipe(
            "agents.agent",
            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
            version="1.2.1",
        )
        nats_cmd.return_value = "ok"
        r = agent_update(agent64_old.pk)
        self.assertEqual(r, "created")
        nats_cmd.assert_called_with(
            {
                "func": "agentupdate",
                "payload": {
                    "url": "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
                    "version": "1.3.0",
                    "inno": "winagent-v1.3.0.exe",
                },
            },
            wait=False,
        )

""" @patch("agents.models.Agent.salt_api_async")
|
||||
@patch("agents.tasks.agent_update")
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_auto_self_agent_update_task(self, mock_sleep, salt_api_async):
|
||||
# test 64bit golang agent
|
||||
self.agent64 = baker.make_recipe(
|
||||
def test_auto_self_agent_update_task(self, mock_sleep, agent_update):
|
||||
baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.0.0",
|
||||
version=settings.LATEST_AGENT_VER,
|
||||
_quantity=23,
|
||||
)
|
||||
salt_api_async.return_value = True
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_called_with(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
|
||||
"url": settings.DL_64,
|
||||
},
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.agent64.delete()
|
||||
salt_api_async.reset_mock()
|
||||
|
||||
# test 32bit golang agent
|
||||
self.agent32 = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 7 Professional, 32 bit (build 7601.24544)",
|
||||
version="1.0.0",
|
||||
)
|
||||
salt_api_async.return_value = True
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_called_with(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe",
|
||||
"url": settings.DL_32,
|
||||
},
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.agent32.delete()
|
||||
salt_api_async.reset_mock()
|
||||
|
||||
# test agent that has a null os field
|
||||
self.agentNone = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system=None,
|
||||
version="1.0.0",
|
||||
)
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_not_called()
|
||||
self.agentNone.delete()
|
||||
salt_api_async.reset_mock()
|
||||
|
||||
# test auto update disabled in global settings
|
||||
self.agent64 = baker.make_recipe(
|
||||
baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.0.0",
|
||||
version="1.3.0",
|
||||
_quantity=33,
|
||||
)
|
||||
|
||||
self.coresettings.agent_auto_update = False
|
||||
self.coresettings.save(update_fields=["agent_auto_update"])
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_not_called()
|
||||
|
||||
# reset core settings
|
||||
self.agent64.delete()
|
||||
salt_api_async.reset_mock()
|
||||
r = auto_self_agent_update_task.s().apply()
|
||||
self.assertEqual(agent_update.call_count, 0)
|
||||
|
||||
self.coresettings.agent_auto_update = True
|
||||
self.coresettings.save(update_fields=["agent_auto_update"])
|
||||
|
||||
# test 64bit python agent
|
||||
self.agent64py = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="0.11.1",
|
||||
)
|
||||
salt_api_async.return_value = True
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_called_with(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": "winagent-v0.11.2.exe",
|
||||
"url": OLD_64_PY_AGENT,
|
||||
},
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.agent64py.delete()
|
||||
salt_api_async.reset_mock()
|
||||
|
||||
# test 32bit python agent
|
||||
self.agent32py = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 7 Professional, 32 bit (build 7601.24544)",
|
||||
version="0.11.1",
|
||||
)
|
||||
salt_api_async.return_value = True
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_called_with(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": "winagent-v0.11.2-x86.exe",
|
||||
"url": OLD_32_PY_AGENT,
|
||||
},
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS") """
|
||||
r = auto_self_agent_update_task.s().apply()
|
||||
self.assertEqual(agent_update.call_count, 33)
|
||||
|
@@ -1,12 +1,11 @@
from django.urls import path

from . import views

urlpatterns = [
    path("listagents/", views.AgentsTableList.as_view()),
    path("listagentsnodetail/", views.list_agents_no_detail),
    path("<int:pk>/agenteditdetails/", views.agent_edit_details),
    path("byclient/<int:clientpk>/", views.by_client),
    path("bysite/<int:sitepk>/", views.by_site),
    path("overdueaction/", views.overdue_action),
    path("sendrawcmd/", views.send_raw_cmd),
    path("<pk>/agentdetail/", views.agent_detail),

@@ -1,46 +1,43 @@
import asyncio
from loguru import logger
import datetime as dt
import os
import subprocess
import pytz
import random
import string
import datetime as dt
from packaging import version as pyver
from typing import List

from django.conf import settings
from django.shortcuts import get_object_or_404
from django.http import HttpResponse

from django.shortcuts import get_object_or_404
from loguru import logger
from packaging import version as pyver
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status, generics
from rest_framework.views import APIView

from .models import Agent, AgentOutage, RecoveryAction, Note
from core.models import CoreSettings
from scripts.models import Script
from logs.models import AuditLog, PendingAction
from scripts.models import Script
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
from tacticalrmm.utils import (
    generate_installer_exe,
    get_default_timezone,
    notify_error,
    reload_nats,
)
from winupdate.serializers import WinUpdatePolicySerializer
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task

from .models import Agent, Note, RecoveryAction
from .serializers import (
    AgentSerializer,
    AgentHostnameSerializer,
    AgentTableSerializer,
    AgentEditSerializer,
    AgentHostnameSerializer,
    AgentOverdueActionSerializer,
    AgentSerializer,
    AgentTableSerializer,
    NoteSerializer,
    NotesSerializer,
)
from winupdate.serializers import WinUpdatePolicySerializer

from .tasks import (
    send_agent_update_task,
    run_script_email_results_task,
)
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task

from tacticalrmm.utils import notify_error, reload_nats
from .tasks import run_script_email_results_task, send_agent_update_task

logger.configure(**settings.LOG_CONFIG)

@@ -58,9 +55,13 @@ def get_agent_versions(request):
|
||||
|
||||
@api_view(["POST"])
|
||||
def update_agents(request):
|
||||
pks = request.data["pks"]
|
||||
version = request.data["version"]
|
||||
send_agent_update_task.delay(pks=pks, version=version)
|
||||
q = Agent.objects.filter(pk__in=request.data["pks"]).only("pk", "version")
|
||||
pks: list[int] = [
|
||||
i.pk
|
||||
for i in q
|
||||
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||
]
|
||||
send_agent_update_task.delay(pks=pks)
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -68,7 +69,6 @@ def update_agents(request):
def ping(request, pk):
    agent = get_object_or_404(Agent, pk=pk)
    status = "offline"
    if agent.has_nats:
        r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
        if r == "pong":
            status = "online"
@@ -79,7 +79,6 @@ def ping(request, pk):
@api_view(["DELETE"])
def uninstall(request):
    agent = get_object_or_404(Agent, pk=request.data["pk"])
    if agent.has_nats:
        asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))

    name = agent.hostname
@@ -92,23 +91,18 @@ def uninstall(request):
def edit_agent(request):
    agent = get_object_or_404(Agent, pk=request.data["id"])

    old_site = agent.site.pk
    a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True)
    a_serializer.is_valid(raise_exception=True)
    a_serializer.save()

    policy = agent.winupdatepolicy.get()
    if "winupdatepolicy" in request.data.keys():
        policy = agent.winupdatepolicy.get()  # type: ignore
        p_serializer = WinUpdatePolicySerializer(
            instance=policy, data=request.data["winupdatepolicy"][0]
        )
        p_serializer.is_valid(raise_exception=True)
        p_serializer.save()

    # check if site changed and initiate generating correct policies
    if old_site != request.data["site"]:
        agent.generate_checks_from_policies()
        agent.generate_tasks_from_policies()

    return Response("ok")


@@ -151,9 +145,6 @@ def agent_detail(request, pk):
@api_view()
def get_processes(request, pk):
    agent = get_object_or_404(Agent, pk=pk)
    if pyver.parse(agent.version) < pyver.parse("1.2.0"):
        return notify_error("Requires agent version 1.2.0 or greater")

    r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
    if r == "timeout":
        return notify_error("Unable to contact the agent")
@@ -163,9 +154,6 @@ def get_processes(request, pk):
@api_view()
def kill_proc(request, pk, pid):
    agent = get_object_or_404(Agent, pk=pk)
    if not agent.has_nats:
        return notify_error("Requires agent version 1.1.0 or greater")

    r = asyncio.run(
        agent.nats_cmd({"func": "killproc", "procpid": int(pid)}, timeout=15)
    )
@@ -181,17 +169,16 @@ def kill_proc(request, pk, pid):
@api_view()
def get_event_log(request, pk, logtype, days):
    agent = get_object_or_404(Agent, pk=pk)
    if not agent.has_nats:
        return notify_error("Requires agent version 1.1.0 or greater")
    timeout = 180 if logtype == "Security" else 30
    data = {
        "func": "eventlog",
        "timeout": 30,
        "timeout": timeout,
        "payload": {
            "logname": logtype,
            "days": str(days),
        },
    }
    r = asyncio.run(agent.nats_cmd(data, timeout=32))
    r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
    if r == "timeout":
        return notify_error("Unable to contact the agent")

@@ -201,8 +188,6 @@ def get_event_log(request, pk, logtype, days):
@api_view(["POST"])
def send_raw_cmd(request):
    agent = get_object_or_404(Agent, pk=request.data["pk"])
    if not agent.has_nats:
        return notify_error("Requires agent version 1.1.0 or greater")
    timeout = int(request.data["timeout"])
    data = {
        "func": "rawcmd",
@@ -227,21 +212,39 @@ def send_raw_cmd(request):
    return Response(r)


class AgentsTableList(generics.ListAPIView):
class AgentsTableList(APIView):
    def patch(self, request):
        if "sitePK" in request.data.keys():
            queryset = (
                Agent.objects.select_related("site")
                Agent.objects.select_related("site", "policy", "alert_template")
                .prefetch_related("agentchecks")
                .only(
                .filter(site_id=request.data["sitePK"])
            )
        elif "clientPK" in request.data.keys():
            queryset = (
                Agent.objects.select_related("site", "policy", "alert_template")
                .prefetch_related("agentchecks")
                .filter(site__client_id=request.data["clientPK"])
            )
        else:
            queryset = Agent.objects.select_related(
                "site", "policy", "alert_template"
            ).prefetch_related("agentchecks")

        queryset = queryset.only(
            "pk",
            "hostname",
            "agent_id",
            "site",
            "policy",
            "alert_template",
            "monitoring_type",
            "description",
            "needs_reboot",
            "overdue_text_alert",
            "overdue_email_alert",
            "overdue_time",
            "offline_time",
            "last_seen",
            "boot_time",
            "logged_in_username",
@@ -249,14 +252,7 @@ class AgentsTableList(generics.ListAPIView):
            "time_zone",
            "maintenance_mode",
        )
    )
    serializer_class = AgentTableSerializer

    def list(self, request):
        queryset = self.get_queryset()
        ctx = {
            "default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)
        }
        ctx = {"default_tz": get_default_timezone()}
        serializer = AgentTableSerializer(queryset, many=True, context=ctx)
        return Response(serializer.data)

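A hypothetical client-side call against the new `AgentsTableList.patch` handler; the URL and auth header are assumptions, only the body keys (`sitePK`, `clientPK`) come from the code above:

```python
import requests

# Filter the agent table by site; use {"clientPK": ...} to filter by client,
# or an empty body to list every agent.
resp = requests.patch(
    "https://rmm.example.com/agents/listagents/",  # assumed endpoint path
    json={"sitePK": 3},
    headers={"Authorization": "Token <api-token>"},
    timeout=30,
)
agents = resp.json()  # rows shaped by AgentTableSerializer
```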
@@ -273,86 +269,14 @@ def agent_edit_details(request, pk):
    return Response(AgentEditSerializer(agent).data)


@api_view()
def by_client(request, clientpk):
    agents = (
        Agent.objects.select_related("site")
        .filter(site__client_id=clientpk)
        .prefetch_related("agentchecks")
        .only(
            "pk",
            "hostname",
            "agent_id",
            "site",
            "monitoring_type",
            "description",
            "needs_reboot",
            "overdue_text_alert",
            "overdue_email_alert",
            "overdue_time",
            "last_seen",
            "boot_time",
            "logged_in_username",
            "last_logged_in_user",
            "time_zone",
            "maintenance_mode",
        )
    )
    ctx = {"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)}
    return Response(AgentTableSerializer(agents, many=True, context=ctx).data)


@api_view()
def by_site(request, sitepk):
    agents = (
        Agent.objects.filter(site_id=sitepk)
        .select_related("site")
        .prefetch_related("agentchecks")
        .only(
            "pk",
            "hostname",
            "agent_id",
            "site",
            "monitoring_type",
            "description",
            "needs_reboot",
            "overdue_text_alert",
            "overdue_email_alert",
            "overdue_time",
            "last_seen",
            "boot_time",
            "logged_in_username",
            "last_logged_in_user",
            "time_zone",
            "maintenance_mode",
        )
    )
    ctx = {"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)}
    return Response(AgentTableSerializer(agents, many=True, context=ctx).data)


@api_view(["POST"])
def overdue_action(request):
    pk = request.data["pk"]
    alert_type = request.data["alertType"]
    action = request.data["action"]
    agent = get_object_or_404(Agent, pk=pk)
    if alert_type == "email" and action == "enabled":
        agent.overdue_email_alert = True
        agent.save(update_fields=["overdue_email_alert"])
    elif alert_type == "email" and action == "disabled":
        agent.overdue_email_alert = False
        agent.save(update_fields=["overdue_email_alert"])
    elif alert_type == "text" and action == "enabled":
        agent.overdue_text_alert = True
        agent.save(update_fields=["overdue_text_alert"])
    elif alert_type == "text" and action == "disabled":
        agent.overdue_text_alert = False
        agent.save(update_fields=["overdue_text_alert"])
    else:
        return Response(
            {"error": "Something went wrong"}, status=status.HTTP_400_BAD_REQUEST
    agent = get_object_or_404(Agent, pk=request.data["pk"])
    serializer = AgentOverdueActionSerializer(
        instance=agent, data=request.data, partial=True
    )
    serializer.is_valid(raise_exception=True)
    serializer.save()
    return Response(agent.hostname)


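With the serializer-based rewrite, one request can set either alert flag directly. A hypothetical payload; beyond the `pk` key and the two model fields shown above, the exact shape is an assumption:

```python
# Sent by the frontend to the overdue_action endpoint and applied via
# AgentOverdueActionSerializer(partial=True).
payload = {
    "pk": 42,
    "overdue_email_alert": True,
    "overdue_text_alert": False,
}
```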
@@ -360,9 +284,6 @@ class Reboot(APIView):
    # reboot now
    def post(self, request):
        agent = get_object_or_404(Agent, pk=request.data["pk"])
        if not agent.has_nats:
            return notify_error("Requires agent version 1.1.0 or greater")

        r = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
        if r != "ok":
            return notify_error("Unable to contact the agent")
@@ -372,8 +293,6 @@ class Reboot(APIView):
    # reboot later
    def patch(self, request):
        agent = get_object_or_404(Agent, pk=request.data["pk"])
        if not agent.has_gotasks:
            return notify_error("Requires agent version 1.1.1 or greater")

        try:
            obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%d %H:%M")
@@ -388,6 +307,7 @@ class Reboot(APIView):
            "func": "schedtask",
            "schedtaskpayload": {
                "type": "schedreboot",
                "deleteafter": True,
                "trigger": "once",
                "name": task_name,
                "year": int(dt.datetime.strftime(obj, "%Y")),
@@ -398,9 +318,6 @@ class Reboot(APIView):
            },
        }

        if pyver.parse(agent.version) >= pyver.parse("1.1.2"):
            nats_data["schedtaskpayload"]["deleteafter"] = True

        r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
        if r != "ok":
            return notify_error(r)
@@ -446,124 +363,20 @@ def install_agent(request):
    )

    if request.data["installMethod"] == "exe":
        go_bin = "/usr/local/rmmgo/go/bin/go"

        if not os.path.exists(go_bin):
            return Response("nogolang", status=status.HTTP_409_CONFLICT)

        api = request.data["api"]
        atype = request.data["agenttype"]
        rdp = request.data["rdp"]
        ping = request.data["ping"]
        power = request.data["power"]

        file_name = "rmm-installer.exe"
        exe = os.path.join(settings.EXE_DIR, file_name)

        if os.path.exists(exe):
            try:
                os.remove(exe)
            except Exception as e:
                logger.error(str(e))

        goarch = "amd64" if arch == "64" else "386"
        cmd = [
            "env",
            "GOOS=windows",
            f"GOARCH={goarch}",
            go_bin,
            "build",
            f"-ldflags=\"-X 'main.Inno={inno}'",
            f"-X 'main.Api={api}'",
            f"-X 'main.Client={client_id}'",
            f"-X 'main.Site={site_id}'",
            f"-X 'main.Atype={atype}'",
            f"-X 'main.Rdp={rdp}'",
            f"-X 'main.Ping={ping}'",
            f"-X 'main.Power={power}'",
            f"-X 'main.DownloadUrl={download_url}'",
            f"-X 'main.Token={token}'\"",
            "-o",
            exe,
        ]

        build_error = False
        gen_error = False

        gen = [
            "env",
            "GOOS=windows",
            f"GOARCH={goarch}",
            go_bin,
            "generate",
        ]
        try:
            r1 = subprocess.run(
                " ".join(gen),
                capture_output=True,
                shell=True,
                cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"),
        return generate_installer_exe(
            file_name="rmm-installer.exe",
            goarch="amd64" if arch == "64" else "386",
            inno=inno,
            api=request.data["api"],
            client_id=client_id,
            site_id=site_id,
            atype=request.data["agenttype"],
            rdp=request.data["rdp"],
            ping=request.data["ping"],
            power=request.data["power"],
            download_url=download_url,
            token=token,
        )
        except Exception as e:
            gen_error = True
            logger.error(str(e))
            return Response(
                "genfailed", status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE
            )

        if r1.returncode != 0:
            gen_error = True
            if r1.stdout:
                logger.error(r1.stdout.decode("utf-8", errors="ignore"))

            if r1.stderr:
                logger.error(r1.stderr.decode("utf-8", errors="ignore"))

            logger.error(f"Go build failed with return code {r1.returncode}")

        if gen_error:
            return Response(
                "genfailed", status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE
            )

        try:
            r = subprocess.run(
                " ".join(cmd),
                capture_output=True,
                shell=True,
                cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"),
            )
        except Exception as e:
            build_error = True
            logger.error(str(e))
            return Response("buildfailed", status=status.HTTP_412_PRECONDITION_FAILED)

        if r.returncode != 0:
            build_error = True
            if r.stdout:
                logger.error(r.stdout.decode("utf-8", errors="ignore"))

            if r.stderr:
                logger.error(r.stderr.decode("utf-8", errors="ignore"))

            logger.error(f"Go build failed with return code {r.returncode}")

        if build_error:
            return Response("buildfailed", status=status.HTTP_412_PRECONDITION_FAILED)

        if settings.DEBUG:
            with open(exe, "rb") as f:
                response = HttpResponse(
                    f.read(),
                    content_type="application/vnd.microsoft.portable-executable",
                )
                response["Content-Disposition"] = f"inline; filename={file_name}"
                return response
        else:
            response = HttpResponse()
            response["Content-Disposition"] = f"attachment; filename={file_name}"
            response["X-Accel-Redirect"] = f"/private/exe/{file_name}"
            return response

    elif request.data["installMethod"] == "manual":
        cmd = [
@@ -571,12 +384,10 @@ def install_agent(request):
            "/VERYSILENT",
            "/SUPPRESSMSGBOXES",
            "&&",
            "timeout",
            "/t",
            "10",
            "/nobreak",
            ">",
            "NUL",
            "ping",
            "127.0.0.1",
            "-n",
            "5",
            "&&",
            r'"C:\Program Files\TacticalAgent\tacticalrmm.exe"',
            "-m",
@@ -659,22 +470,14 @@ def recover(request):
    agent = get_object_or_404(Agent, pk=request.data["pk"])
    mode = request.data["mode"]

    if pyver.parse(agent.version) <= pyver.parse("0.9.5"):
        return notify_error("Only available in agent version greater than 0.9.5")

    if not agent.has_nats:
        if mode == "tacagent" or mode == "rpc":
            return notify_error("Requires agent version 1.1.0 or greater")

    # attempt a realtime recovery if supported, otherwise fall back to old recovery method
    if agent.has_nats:
        # attempt a realtime recovery, otherwise fall back to old recovery method
        if mode == "tacagent" or mode == "mesh":
            data = {"func": "recover", "payload": {"mode": mode}}
            r = asyncio.run(agent.nats_cmd(data, timeout=10))
            if r == "ok":
                return Response("Successfully completed recovery")

    if agent.recoveryactions.filter(last_run=None).exists():
    if agent.recoveryactions.filter(last_run=None).exists():  # type: ignore
        return notify_error(
            "A recovery action is currently pending. Please wait for the next agent check-in."
        )
@@ -702,10 +505,9 @@ def recover(request):
@api_view(["POST"])
def run_script(request):
    agent = get_object_or_404(Agent, pk=request.data["pk"])
    if not agent.has_nats:
        return notify_error("Requires agent version 1.1.0 or greater")
    script = get_object_or_404(Script, pk=request.data["scriptPK"])
    output = request.data["output"]
    args = request.data["args"]
    req_timeout = int(request.data["timeout"]) + 3

    AuditLog.audit_script_run(
@@ -714,23 +516,13 @@ def run_script(request):
        script=script.name,
    )

    data = {
        "func": "runscript",
        "timeout": request.data["timeout"],
        "script_args": request.data["args"],
        "payload": {
            "code": script.code,
            "shell": script.shell,
        },
    }

    if output == "wait":
        r = asyncio.run(agent.nats_cmd(data, timeout=req_timeout))
        r = agent.run_script(
            scriptpk=script.pk, args=args, timeout=req_timeout, wait=True
        )
        return Response(r)
    elif output == "email":
        if not pyver.parse(agent.version) >= pyver.parse("1.1.12"):
            return notify_error("Requires agent version 1.1.12 or greater")

    elif output == "email":
        emails = (
            [] if request.data["emailmode"] == "default" else request.data["emails"]
        )
@@ -738,21 +530,18 @@ def run_script(request):
            agentpk=agent.pk,
            scriptpk=script.pk,
            nats_timeout=req_timeout,
            nats_data=data,
            emails=emails,
            args=args,
        )
        return Response(f"{script.name} will now be run on {agent.hostname}")
    else:
        asyncio.run(agent.nats_cmd(data, wait=False))
        agent.run_script(scriptpk=script.pk, args=args, timeout=req_timeout)

    return Response(f"{script.name} will now be run on {agent.hostname}")


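The two `Agent.run_script` call shapes used above, side by side. A minimal sketch assuming a Django shell with the project loaded; the pk values and args are placeholders, and only the keyword arguments shown in this diff are relied on:

```python
from agents.models import Agent
from scripts.models import Script

agent = Agent.objects.get(pk=1)
script = Script.objects.get(pk=2)

# 1) Synchronous: wait for the script output and hand it back to the caller.
output = agent.run_script(scriptpk=script.pk, args=["-Verbose"], timeout=33, wait=True)

# 2) Fire-and-forget: queue the run on the agent and return immediately.
agent.run_script(scriptpk=script.pk, args=[], timeout=33)
```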
@api_view()
def recover_mesh(request, pk):
    agent = get_object_or_404(Agent, pk=pk)
    if not agent.has_nats:
        return notify_error("Requires agent version 1.1.0 or greater")

    data = {"func": "recover", "payload": {"mode": "mesh"}}
    r = asyncio.run(agent.nats_cmd(data, timeout=45))
    if r != "ok":
@@ -834,7 +623,7 @@ def bulk(request):
    elif request.data["monType"] == "workstations":
        q = q.filter(monitoring_type="workstation")

    agents: List[int] = [agent.pk for agent in q]
    agents: list[int] = [agent.pk for agent in q]

    AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)

@@ -865,20 +654,43 @@ def bulk(request):

@api_view(["POST"])
def agent_counts(request):

    server_offline_count = len(
        [
            agent
            for agent in Agent.objects.filter(monitoring_type="server").only(
                "pk",
                "last_seen",
                "overdue_time",
                "offline_time",
            )
            if not agent.status == "online"
        ]
    )

    workstation_offline_count = len(
        [
            agent
            for agent in Agent.objects.filter(monitoring_type="workstation").only(
                "pk",
                "last_seen",
                "overdue_time",
                "offline_time",
            )
            if not agent.status == "online"
        ]
    )

    return Response(
        {
            "total_server_count": Agent.objects.filter(
                monitoring_type="server"
            ).count(),
            "total_server_offline_count": AgentOutage.objects.filter(
                recovery_time=None, agent__monitoring_type="server"
            ).count(),
            "total_server_offline_count": server_offline_count,
            "total_workstation_count": Agent.objects.filter(
                monitoring_type="workstation"
            ).count(),
            "total_workstation_offline_count": AgentOutage.objects.filter(
                recovery_time=None, agent__monitoring_type="workstation"
            ).count(),
            "total_workstation_offline_count": workstation_offline_count,
        }
    )

@@ -909,9 +721,6 @@ def agent_maintenance(request):
class WMI(APIView):
    def get(self, request, pk):
        agent = get_object_or_404(Agent, pk=pk)
        if pyver.parse(agent.version) < pyver.parse("1.1.2"):
            return notify_error("Requires agent version 1.1.2 or greater")

        r = asyncio.run(agent.nats_cmd({"func": "sysinfo"}, timeout=20))
        if r != "ok":
            return notify_error("Unable to contact the agent")

@@ -1,6 +1,6 @@
from django.contrib import admin

from .models import Alert

from .models import Alert, AlertTemplate

admin.site.register(Alert)
admin.site.register(AlertTemplate)

@@ -1,7 +1,7 @@
# Generated by Django 3.1 on 2020-08-15 15:31

from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

@@ -1,7 +1,7 @@
# Generated by Django 3.1.2 on 2020-10-21 18:15

from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

api/tacticalrmm/alerts/migrations/0004_auto_20210212_1408.py (new file, 172 lines)
@@ -0,0 +1,172 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-12 14:08
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0029_delete_agentoutage'),
|
||||
('clients', '0008_auto_20201103_1430'),
|
||||
('autotasks', '0017_auto_20210210_1512'),
|
||||
('scripts', '0005_auto_20201207_1606'),
|
||||
('alerts', '0003_auto_20201021_1815'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='action_execution_time',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='action_retcode',
|
||||
field=models.IntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='action_run',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='action_stderr',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='action_stdout',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='action_timeout',
|
||||
field=models.PositiveIntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='alert_type',
|
||||
field=models.CharField(choices=[('availability', 'Availability'), ('check', 'Check'), ('task', 'Task'), ('custom', 'Custom')], default='availability', max_length=20),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='assigned_task',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='autotasks.automatedtask'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='email_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='hidden',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_action_execution_time',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_action_retcode',
|
||||
field=models.IntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_action_run',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_action_stderr',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_action_stdout',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_action_timeout',
|
||||
field=models.PositiveIntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_email_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_on',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_sms_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='sms_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='snoozed',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alert',
|
||||
name='severity',
|
||||
field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=30),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='AlertTemplate',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(max_length=100)),
|
||||
('is_active', models.BooleanField(default=True)),
|
||||
('action_args', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)),
|
||||
('resolved_action_args', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)),
|
||||
('email_recipients', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100), blank=True, default=list, null=True, size=None)),
|
||||
('text_recipients', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100), blank=True, default=list, null=True, size=None)),
|
||||
('email_from', models.EmailField(blank=True, max_length=254, null=True)),
|
||||
('agent_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('agent_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('agent_include_desktops', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('agent_always_email', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('agent_always_text', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('agent_always_alert', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('agent_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)),
|
||||
('check_email_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||
('check_text_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||
('check_dashboard_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||
('check_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('check_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('check_always_email', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('check_always_text', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('check_always_alert', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('check_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)),
|
||||
('task_email_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||
('task_text_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||
('task_dashboard_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||
('task_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('task_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('task_always_email', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('task_always_text', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('task_always_alert', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('task_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)),
|
||||
('action', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='alert_template', to='scripts.script')),
|
||||
('excluded_agents', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='agents.Agent')),
|
||||
('excluded_clients', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='clients.Client')),
|
||||
('excluded_sites', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='clients.Site')),
|
||||
('resolved_action', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_alert_template', to='scripts.script')),
|
||||
],
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/alerts/migrations/0005_auto_20210212_1745.py (new file, 31 lines)
@@ -0,0 +1,31 @@
# Generated by Django 3.1.4 on 2021-02-12 17:45

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('alerts', '0004_auto_20210212_1408'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='alert',
            name='action_timeout',
        ),
        migrations.RemoveField(
            model_name='alert',
            name='resolved_action_timeout',
        ),
        migrations.AddField(
            model_name='alerttemplate',
            name='action_timeout',
            field=models.PositiveIntegerField(default=15),
        ),
        migrations.AddField(
            model_name='alerttemplate',
            name='resolved_action_timeout',
            field=models.PositiveIntegerField(default=15),
        ),
    ]
api/tacticalrmm/alerts/migrations/0006_auto_20210217_1736.py (new file, 72 lines)
@@ -0,0 +1,72 @@
|
||||
# Generated by Django 3.1.6 on 2021-02-17 17:36
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0005_auto_20210212_1745'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_include_desktops',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='exclude_servers',
|
||||
field=models.BooleanField(blank=True, default=False, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='exclude_workstations',
|
||||
field=models.BooleanField(blank=True, default=False, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_always_alert',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_always_email',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_always_text',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='check_always_alert',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='check_always_email',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='check_always_text',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='task_always_alert',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='task_always_email',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='task_always_text',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
]
|
||||
@@ -1,5 +1,20 @@
from django.db import models
from __future__ import annotations

from typing import TYPE_CHECKING, Union

from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.db.models.fields import BooleanField, PositiveIntegerField
from django.utils import timezone as djangotime
from loguru import logger

if TYPE_CHECKING:
    from agents.models import Agent
    from autotasks.models import AutomatedTask
    from checks.models import Check

logger.configure(**settings.LOG_CONFIG)

SEVERITY_CHOICES = [
    ("info", "Informational"),
@@ -7,6 +22,13 @@ SEVERITY_CHOICES = [
    ("error", "Error"),
]

ALERT_TYPE_CHOICES = [
    ("availability", "Availability"),
    ("check", "Check"),
    ("task", "Task"),
    ("custom", "Custom"),
]


class Alert(models.Model):
|
||||
agent = models.ForeignKey(
|
||||
@@ -23,21 +45,554 @@ class Alert(models.Model):
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
assigned_task = models.ForeignKey(
|
||||
"autotasks.AutomatedTask",
|
||||
related_name="alert",
|
||||
on_delete=models.CASCADE,
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
alert_type = models.CharField(
|
||||
max_length=20, choices=ALERT_TYPE_CHOICES, default="availability"
|
||||
)
|
||||
message = models.TextField(null=True, blank=True)
|
||||
alert_time = models.DateTimeField(auto_now_add=True, null=True)
|
||||
alert_time = models.DateTimeField(auto_now_add=True, null=True, blank=True)
|
||||
snoozed = models.BooleanField(default=False)
|
||||
snooze_until = models.DateTimeField(null=True, blank=True)
|
||||
resolved = models.BooleanField(default=False)
|
||||
severity = models.CharField(
|
||||
max_length=100, choices=SEVERITY_CHOICES, default="info"
|
||||
resolved_on = models.DateTimeField(null=True, blank=True)
|
||||
severity = models.CharField(max_length=30, choices=SEVERITY_CHOICES, default="info")
|
||||
email_sent = models.DateTimeField(null=True, blank=True)
|
||||
resolved_email_sent = models.DateTimeField(null=True, blank=True)
|
||||
sms_sent = models.DateTimeField(null=True, blank=True)
|
||||
resolved_sms_sent = models.DateTimeField(null=True, blank=True)
|
||||
hidden = models.BooleanField(default=False)
|
||||
action_run = models.DateTimeField(null=True, blank=True)
|
||||
action_stdout = models.TextField(null=True, blank=True)
|
||||
action_stderr = models.TextField(null=True, blank=True)
|
||||
action_retcode = models.IntegerField(null=True, blank=True)
|
||||
action_execution_time = models.CharField(max_length=100, null=True, blank=True)
|
||||
resolved_action_run = models.DateTimeField(null=True, blank=True)
|
||||
resolved_action_stdout = models.TextField(null=True, blank=True)
|
||||
resolved_action_stderr = models.TextField(null=True, blank=True)
|
||||
resolved_action_retcode = models.IntegerField(null=True, blank=True)
|
||||
resolved_action_execution_time = models.CharField(
|
||||
max_length=100, null=True, blank=True
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.message
|
||||
|
||||
@classmethod
|
||||
def create_availability_alert(cls, agent):
|
||||
pass
|
||||
def resolve(self):
|
||||
self.resolved = True
|
||||
self.resolved_on = djangotime.now()
|
||||
self.snoozed = False
|
||||
self.snooze_until = None
|
||||
self.save()
|
||||
|
||||
@classmethod
|
||||
def create_check_alert(cls, check):
|
||||
def create_or_return_availability_alert(cls, agent):
|
||||
if not cls.objects.filter(agent=agent, resolved=False).exists():
|
||||
return cls.objects.create(
|
||||
agent=agent,
|
||||
alert_type="availability",
|
||||
severity="error",
|
||||
message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.",
|
||||
hidden=True,
|
||||
)
|
||||
else:
|
||||
return cls.objects.get(agent=agent, resolved=False)
|
||||
|
||||
@classmethod
|
||||
def create_or_return_check_alert(cls, check):
|
||||
|
||||
if not cls.objects.filter(assigned_check=check, resolved=False).exists():
|
||||
return cls.objects.create(
|
||||
assigned_check=check,
|
||||
alert_type="check",
|
||||
severity=check.alert_severity,
|
||||
message=f"{check.agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
|
||||
hidden=True,
|
||||
)
|
||||
else:
|
||||
return cls.objects.get(assigned_check=check, resolved=False)
|
||||
|
||||
@classmethod
|
||||
def create_or_return_task_alert(cls, task):
|
||||
|
||||
if not cls.objects.filter(assigned_task=task, resolved=False).exists():
|
||||
return cls.objects.create(
|
||||
assigned_task=task,
|
||||
alert_type="task",
|
||||
severity=task.alert_severity,
|
||||
message=f"{task.agent.hostname} has task: {task.name} that failed.",
|
||||
hidden=True,
|
||||
)
|
||||
else:
|
||||
return cls.objects.get(assigned_task=task, resolved=False)
|
||||
|
||||
@classmethod
|
||||
def handle_alert_failure(cls, instance: Union[Agent, AutomatedTask, Check]) -> None:
|
||||
from agents.models import Agent
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
|
||||
# set variables
|
||||
dashboard_severities = None
|
||||
email_severities = None
|
||||
text_severities = None
|
||||
always_dashboard = None
|
||||
always_email = None
|
||||
always_text = None
|
||||
alert_interval = None
|
||||
email_task = None
|
||||
text_task = None
|
||||
|
||||
# check what the instance passed is
|
||||
if isinstance(instance, Agent):
|
||||
from agents.tasks import agent_outage_email_task, agent_outage_sms_task
|
||||
|
||||
email_task = agent_outage_email_task
|
||||
text_task = agent_outage_sms_task
|
||||
|
||||
email_alert = instance.overdue_email_alert
|
||||
text_alert = instance.overdue_text_alert
|
||||
dashboard_alert = instance.overdue_dashboard_alert
|
||||
alert_template = instance.alert_template
|
||||
maintenance_mode = instance.maintenance_mode
|
||||
alert_severity = "error"
|
||||
agent = instance
|
||||
|
||||
# set alert_template settings
|
||||
if alert_template:
|
||||
dashboard_severities = ["error"]
|
||||
email_severities = ["error"]
|
||||
text_severities = ["error"]
|
||||
always_dashboard = alert_template.agent_always_alert
|
||||
always_email = alert_template.agent_always_email
|
||||
always_text = alert_template.agent_always_text
|
||||
alert_interval = alert_template.agent_periodic_alert_days
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_availability_alert(instance)
|
||||
else:
|
||||
# check if there is an alert that exists
|
||||
if cls.objects.filter(agent=instance, resolved=False).exists():
|
||||
alert = cls.objects.get(agent=instance, resolved=False)
|
||||
else:
|
||||
alert = None
|
||||
|
||||
elif isinstance(instance, Check):
|
||||
from checks.tasks import (
|
||||
handle_check_email_alert_task,
|
||||
handle_check_sms_alert_task,
|
||||
)
|
||||
|
||||
email_task = handle_check_email_alert_task
|
||||
text_task = handle_check_sms_alert_task
|
||||
|
||||
email_alert = instance.email_alert
|
||||
text_alert = instance.text_alert
|
||||
dashboard_alert = instance.dashboard_alert
|
||||
alert_template = instance.agent.alert_template
|
||||
maintenance_mode = instance.agent.maintenance_mode
|
||||
alert_severity = instance.alert_severity
|
||||
agent = instance.agent
|
||||
|
||||
# set alert_template settings
|
||||
if alert_template:
|
||||
dashboard_severities = alert_template.check_dashboard_alert_severity
|
||||
email_severities = alert_template.check_email_alert_severity
|
||||
text_severities = alert_template.check_text_alert_severity
|
||||
always_dashboard = alert_template.check_always_alert
|
||||
always_email = alert_template.check_always_email
|
||||
always_text = alert_template.check_always_text
|
||||
alert_interval = alert_template.check_periodic_alert_days
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_check_alert(instance)
|
||||
else:
|
||||
# check if there is an alert that exists
|
||||
if cls.objects.filter(assigned_check=instance, resolved=False).exists():
|
||||
alert = cls.objects.get(assigned_check=instance, resolved=False)
|
||||
else:
|
||||
alert = None
|
||||
|
||||
elif isinstance(instance, AutomatedTask):
|
||||
from autotasks.tasks import handle_task_email_alert, handle_task_sms_alert
|
||||
|
||||
email_task = handle_task_email_alert
|
||||
text_task = handle_task_sms_alert
|
||||
|
||||
email_alert = instance.email_alert
|
||||
text_alert = instance.text_alert
|
||||
dashboard_alert = instance.dashboard_alert
|
||||
alert_template = instance.agent.alert_template
|
||||
maintenance_mode = instance.agent.maintenance_mode
|
||||
alert_severity = instance.alert_severity
|
||||
agent = instance.agent
|
||||
|
||||
# set alert_template settings
|
||||
if alert_template:
|
||||
dashboard_severities = alert_template.task_dashboard_alert_severity
|
||||
email_severities = alert_template.task_email_alert_severity
|
||||
text_severities = alert_template.task_text_alert_severity
|
||||
always_dashboard = alert_template.task_always_alert
|
||||
always_email = alert_template.task_always_email
|
||||
always_text = alert_template.task_always_text
|
||||
alert_interval = alert_template.task_periodic_alert_days
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_task_alert(instance)
|
||||
else:
|
||||
# check if there is an alert that exists
|
||||
if cls.objects.filter(assigned_task=instance, resolved=False).exists():
|
||||
alert = cls.objects.get(assigned_task=instance, resolved=False)
|
||||
else:
|
||||
alert = None
|
||||
else:
|
||||
return
|
||||
|
||||
# return if agent is in maintenance mode
|
||||
if maintenance_mode or not alert:
|
||||
return
|
||||
|
||||
# check if alert severity changed on check and update the alert
|
||||
if alert_severity != alert.severity:
|
||||
alert.severity = alert_severity
|
||||
alert.save(update_fields=["severity"])
|
||||
|
||||
# create alert in dashboard if enabled
|
||||
if dashboard_alert or always_dashboard:
|
||||
|
||||
# check if alert template is set and specific severities are configured
|
||||
if alert_template and alert.severity not in dashboard_severities: # type: ignore
|
||||
pass
|
||||
else:
|
||||
alert.hidden = False
|
||||
alert.save()
|
||||
|
||||
# send email if enabled
|
||||
if email_alert or always_email:
|
||||
|
||||
# check if alert template is set and specific severities are configured
|
||||
if alert_template and alert.severity not in email_severities: # type: ignore
|
||||
pass
|
||||
else:
|
||||
email_task.delay(
|
||||
pk=alert.pk,
|
||||
alert_interval=alert_interval,
|
||||
)
|
||||
|
||||
# send text if enabled
|
||||
if text_alert or always_text:
|
||||
|
||||
# check if alert template is set and specific severities are configured
|
||||
if alert_template and alert.severity not in text_severities: # type: ignore
|
||||
pass
|
||||
else:
|
||||
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
|
||||
|
||||
# check if any scripts should be run
|
||||
if alert_template and alert_template.action and not alert.action_run:
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.action.pk,
|
||||
args=alert_template.action_args,
|
||||
timeout=alert_template.action_timeout,
|
||||
wait=True,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
)
|
||||
|
||||
# command was successful
|
||||
if type(r) == dict:
|
||||
alert.action_retcode = r["retcode"]
|
||||
alert.action_stdout = r["stdout"]
|
||||
alert.action_stderr = r["stderr"]
|
||||
alert.action_execution_time = "{:.4f}".format(r["execution_time"])
|
||||
alert.action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname} failure alert"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def handle_alert_resolve(cls, instance: Union[Agent, AutomatedTask, Check]) -> None:
|
||||
from agents.models import Agent
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
|
||||
# set variables
|
||||
email_on_resolved = False
|
||||
text_on_resolved = False
|
||||
resolved_email_task = None
|
||||
resolved_text_task = None
|
||||
|
||||
# check what the instance passed is
|
||||
if isinstance(instance, Agent):
|
||||
from agents.tasks import agent_recovery_email_task, agent_recovery_sms_task
|
||||
|
||||
resolved_email_task = agent_recovery_email_task
|
||||
resolved_text_task = agent_recovery_sms_task
|
||||
|
||||
alert_template = instance.alert_template
|
||||
alert = cls.objects.get(agent=instance, resolved=False)
|
||||
maintenance_mode = instance.maintenance_mode
|
||||
agent = instance
|
||||
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.agent_email_on_resolved
|
||||
text_on_resolved = alert_template.agent_text_on_resolved
|
||||
|
||||
elif isinstance(instance, Check):
|
||||
from checks.tasks import (
|
||||
handle_resolved_check_email_alert_task,
|
||||
handle_resolved_check_sms_alert_task,
|
||||
)
|
||||
|
||||
resolved_email_task = handle_resolved_check_email_alert_task
|
||||
resolved_text_task = handle_resolved_check_sms_alert_task
|
||||
|
||||
alert_template = instance.agent.alert_template
|
||||
alert = cls.objects.get(assigned_check=instance, resolved=False)
|
||||
maintenance_mode = instance.agent.maintenance_mode
|
||||
agent = instance.agent
|
||||
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.check_email_on_resolved
|
||||
text_on_resolved = alert_template.check_text_on_resolved
|
||||
|
||||
elif isinstance(instance, AutomatedTask):
|
||||
from autotasks.tasks import (
|
||||
handle_resolved_task_email_alert,
|
||||
handle_resolved_task_sms_alert,
|
||||
)
|
||||
|
||||
resolved_email_task = handle_resolved_task_email_alert
|
||||
resolved_text_task = handle_resolved_task_sms_alert
|
||||
|
||||
alert_template = instance.agent.alert_template
|
||||
alert = cls.objects.get(assigned_task=instance, resolved=False)
|
||||
maintenance_mode = instance.agent.maintenance_mode
|
||||
agent = instance.agent
|
||||
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.task_email_on_resolved
|
||||
text_on_resolved = alert_template.task_text_on_resolved
|
||||
|
||||
else:
|
||||
return
|
||||
|
||||
# return if agent is in maintenance mode
|
||||
if maintenance_mode:
|
||||
return
|
||||
|
||||
alert.resolve()
|
||||
|
||||
# check if a resolved email notification should be send
|
||||
if email_on_resolved and not alert.resolved_email_sent:
|
||||
resolved_email_task.delay(pk=alert.pk)
|
||||
|
||||
# check if resolved text should be sent
|
||||
if text_on_resolved and not alert.resolved_sms_sent:
|
||||
resolved_text_task.delay(pk=alert.pk)
|
||||
|
||||
# check if resolved script should be run
|
||||
if (
|
||||
alert_template
|
||||
and alert_template.resolved_action
|
||||
and not alert.resolved_action_run
|
||||
):
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.resolved_action.pk,
|
||||
args=alert_template.resolved_action_args,
|
||||
timeout=alert_template.resolved_action_timeout,
|
||||
wait=True,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
)
|
||||
|
||||
# command was successful
|
||||
if type(r) == dict:
|
||||
alert.resolved_action_retcode = r["retcode"]
|
||||
alert.resolved_action_stdout = r["stdout"]
|
||||
alert.resolved_action_stderr = r["stderr"]
|
||||
alert.resolved_action_execution_time = "{:.4f}".format(
|
||||
r["execution_time"]
|
||||
)
|
||||
alert.resolved_action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname} resolved alert"
|
||||
)
|
||||
|
||||
|
||||
class AlertTemplate(models.Model):
|
||||
name = models.CharField(max_length=100)
|
||||
is_active = models.BooleanField(default=True)
|
||||
|
||||
action = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
related_name="alert_template",
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
action_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
action_timeout = models.PositiveIntegerField(default=15)
|
||||
resolved_action = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
related_name="resolved_alert_template",
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
resolved_action_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
resolved_action_timeout = models.PositiveIntegerField(default=15)
|
||||
|
||||
# overrides the global recipients
|
||||
email_recipients = ArrayField(
|
||||
models.CharField(max_length=100, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
text_recipients = ArrayField(
|
||||
models.CharField(max_length=100, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
|
||||
# overrides the from address
|
||||
email_from = models.EmailField(blank=True, null=True)
|
||||
|
||||
# agent alert settings
|
||||
agent_email_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||
agent_text_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||
agent_always_email = BooleanField(null=True, blank=True, default=None)
|
||||
agent_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
agent_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
|
||||
# check alert settings
|
||||
check_email_alert_severity = ArrayField(
|
||||
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
check_text_alert_severity = ArrayField(
|
||||
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
check_dashboard_alert_severity = ArrayField(
|
||||
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
check_email_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||
check_text_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||
check_always_email = BooleanField(null=True, blank=True, default=None)
|
||||
check_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
check_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
|
||||
# task alert settings
|
||||
task_email_alert_severity = ArrayField(
|
||||
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
task_text_alert_severity = ArrayField(
|
||||
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
task_dashboard_alert_severity = ArrayField(
|
||||
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
task_email_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||
task_text_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||
task_always_email = BooleanField(null=True, blank=True, default=None)
|
||||
task_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
task_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
|
||||
# exclusion settings
|
||||
exclude_workstations = BooleanField(null=True, blank=True, default=False)
|
||||
exclude_servers = BooleanField(null=True, blank=True, default=False)
|
||||
|
||||
excluded_sites = models.ManyToManyField(
|
||||
"clients.Site", related_name="alert_exclusions", blank=True
|
||||
)
|
||||
excluded_clients = models.ManyToManyField(
|
||||
"clients.Client", related_name="alert_exclusions", blank=True
|
||||
)
|
||||
excluded_agents = models.ManyToManyField(
|
||||
"agents.Agent", related_name="alert_exclusions", blank=True
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def has_agent_settings(self) -> bool:
|
||||
return (
|
||||
self.agent_email_on_resolved
|
||||
or self.agent_text_on_resolved
|
||||
or self.agent_always_email
|
||||
or self.agent_always_text
|
||||
or self.agent_always_alert
|
||||
or bool(self.agent_periodic_alert_days)
|
||||
)
|
||||
|
||||
@property
|
||||
def has_check_settings(self) -> bool:
|
||||
return (
|
||||
bool(self.check_email_alert_severity)
|
||||
or bool(self.check_text_alert_severity)
|
||||
or bool(self.check_dashboard_alert_severity)
|
||||
or self.check_email_on_resolved
|
||||
or self.check_text_on_resolved
|
||||
or self.check_always_email
|
||||
or self.check_always_text
|
||||
or self.check_always_alert
|
||||
or bool(self.check_periodic_alert_days)
|
||||
)
|
||||
|
||||
@property
|
||||
def has_task_settings(self) -> bool:
|
||||
return (
|
||||
bool(self.task_email_alert_severity)
|
||||
or bool(self.task_text_alert_severity)
|
||||
or bool(self.task_dashboard_alert_severity)
|
||||
or self.task_email_on_resolved
|
||||
or self.task_text_on_resolved
|
||||
or self.task_always_email
|
||||
or self.task_always_text
|
||||
or self.task_always_alert
|
||||
or bool(self.task_periodic_alert_days)
|
||||
)
|
||||
|
||||
@property
|
||||
def has_core_settings(self) -> bool:
|
||||
return bool(self.email_from) or self.email_recipients or self.text_recipients
|
||||
|
||||
@property
|
||||
def is_default_template(self) -> bool:
|
||||
return self.default_alert_template.exists() # type: ignore
|
||||
|
||||
@@ -1,19 +1,121 @@
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
ReadOnlyField,
|
||||
DateTimeField,
|
||||
)
|
||||
from rest_framework.fields import SerializerMethodField
|
||||
from rest_framework.serializers import ModelSerializer, ReadOnlyField
|
||||
|
||||
from .models import Alert
|
||||
from automation.serializers import PolicySerializer
|
||||
from clients.serializers import ClientSerializer, SiteSerializer
|
||||
from tacticalrmm.utils import get_default_timezone
|
||||
|
||||
from .models import Alert, AlertTemplate
|
||||
|
||||
|
||||
class AlertSerializer(ModelSerializer):
|
||||
|
||||
hostname = ReadOnlyField(source="agent.hostname")
|
||||
client = ReadOnlyField(source="agent.client")
|
||||
site = ReadOnlyField(source="agent.site")
|
||||
alert_time = DateTimeField(format="iso-8601")
|
||||
hostname = SerializerMethodField(read_only=True)
|
||||
client = SerializerMethodField(read_only=True)
|
||||
site = SerializerMethodField(read_only=True)
|
||||
alert_time = SerializerMethodField(read_only=True)
|
||||
resolve_on = SerializerMethodField(read_only=True)
|
||||
snoozed_until = SerializerMethodField(read_only=True)
|
||||
|
||||
def get_hostname(self, instance):
|
||||
if instance.alert_type == "availability":
|
||||
return instance.agent.hostname if instance.agent else ""
|
||||
elif instance.alert_type == "check":
|
||||
return (
|
||||
instance.assigned_check.agent.hostname
|
||||
if instance.assigned_check
|
||||
else ""
|
||||
)
|
||||
elif instance.alert_type == "task":
|
||||
return (
|
||||
instance.assigned_task.agent.hostname if instance.assigned_task else ""
|
||||
)
|
||||
else:
|
||||
return ""
|
||||
|
||||
def get_client(self, instance):
|
||||
if instance.alert_type == "availability":
|
||||
return instance.agent.client.name if instance.agent else ""
|
||||
elif instance.alert_type == "check":
|
||||
return (
|
||||
instance.assigned_check.agent.client.name
|
||||
if instance.assigned_check
|
||||
else ""
|
||||
)
|
||||
elif instance.alert_type == "task":
|
||||
return (
|
||||
instance.assigned_task.agent.client.name
|
||||
if instance.assigned_task
|
||||
else ""
|
||||
)
|
||||
else:
|
||||
return ""
|
||||
|
||||
def get_site(self, instance):
|
||||
if instance.alert_type == "availability":
|
||||
return instance.agent.site.name if instance.agent else ""
|
||||
elif instance.alert_type == "check":
|
||||
return (
|
||||
instance.assigned_check.agent.site.name
|
||||
if instance.assigned_check
|
||||
else ""
|
||||
)
|
||||
elif instance.alert_type == "task":
|
||||
return (
|
||||
instance.assigned_task.agent.site.name if instance.assigned_task else ""
|
||||
)
|
||||
else:
|
||||
return ""
|
||||
|
||||
def get_alert_time(self, instance):
|
||||
if instance.alert_time:
|
||||
return instance.alert_time.astimezone(get_default_timezone()).timestamp()
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_resolve_on(self, instance):
|
||||
if instance.resolved_on:
|
||||
return instance.resolved_on.astimezone(get_default_timezone()).timestamp()
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_snoozed_until(self, instance):
|
||||
if instance.snooze_until:
|
||||
return instance.snooze_until.astimezone(get_default_timezone()).timestamp()
|
||||
return None
|
||||
|
||||
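# Note (not part of this diff): the get_hostname/get_client/get_site methods
# above repeat the same alert_type dispatch; one possible consolidation is to
# resolve the agent once and reuse it, e.g.:
def _agent_for(self, instance):
    if instance.alert_type == "availability":
        return instance.agent
    if instance.alert_type == "check" and instance.assigned_check:
        return instance.assigned_check.agent
    if instance.alert_type == "task" and instance.assigned_task:
        return instance.assigned_task.agent
    return None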
class Meta:
|
||||
model = Alert
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class AlertTemplateSerializer(ModelSerializer):
|
||||
agent_settings = ReadOnlyField(source="has_agent_settings")
|
||||
check_settings = ReadOnlyField(source="has_check_settings")
|
||||
task_settings = ReadOnlyField(source="has_task_settings")
|
||||
core_settings = ReadOnlyField(source="has_core_settings")
|
||||
default_template = ReadOnlyField(source="is_default_template")
|
||||
action_name = ReadOnlyField(source="action.name")
|
||||
resolved_action_name = ReadOnlyField(source="resolved_action.name")
|
||||
applied_count = SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = AlertTemplate
|
||||
fields = "__all__"
|
||||
|
||||
def get_applied_count(self, instance):
|
||||
count = 0
|
||||
count += instance.policies.count()
|
||||
count += instance.clients.count()
|
||||
count += instance.sites.count()
|
||||
return count
|
||||
|
||||
|
||||
class AlertTemplateRelationSerializer(ModelSerializer):
|
||||
policies = PolicySerializer(read_only=True, many=True)
|
||||
clients = ClientSerializer(read_only=True, many=True)
|
||||
sites = SiteSerializer(read_only=True, many=True)
|
||||
|
||||
class Meta:
|
||||
model = AlertTemplate
|
||||
fields = "__all__"
|
||||
|
||||
24  api/tacticalrmm/alerts/tasks.py  Normal file
@@ -0,0 +1,24 @@
from django.utils import timezone as djangotime

from alerts.models import Alert
from tacticalrmm.celery import app


@app.task
def unsnooze_alerts() -> str:

    Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update(
        snoozed=False, snooze_until=None
    )

    return "ok"


@app.task
def cache_agents_alert_template():
    from agents.models import Agent

    for agent in Agent.objects.only("pk"):
        agent.set_alert_template()

    return "ok"
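A minimal sketch of how unsnooze_alerts could be wired into Celery beat; the schedule key and the 60-second cadence below are illustrative assumptions, not taken from this diff:

from tacticalrmm.celery import app

app.conf.beat_schedule = {
    "unsnooze-alerts": {
        "task": "alerts.tasks.unsnooze_alerts",
        "schedule": 60.0,  # seconds between runs; pick whatever cadence fits
    },
}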
File diff suppressed because it is too large
@@ -1,7 +1,12 @@
from django.urls import path

from . import views

urlpatterns = [
    path("alerts/", views.GetAddAlerts.as_view()),
    path("bulk/", views.BulkAlerts.as_view()),
    path("alerts/<int:pk>/", views.GetUpdateDeleteAlert.as_view()),
    path("alerttemplates/", views.GetAddAlertTemplates.as_view()),
    path("alerttemplates/<int:pk>/", views.GetUpdateDeleteAlertTemplate.as_view()),
    path("alerttemplates/<int:pk>/related/", views.RelatedAlertTemplate.as_view()),
]

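Hypothetical client call against the bulk route above; the host, token, and URL prefix are assumptions for illustration, but the payload shape matches the BulkAlerts view later in this diff:

import requests

resp = requests.post(
    "https://rmm.example.com/alerts/bulk/",
    headers={"Authorization": "Token <api-token>"},
    json={"bulk_action": "resolve", "alerts": [1, 2, 3]},
)
print(resp.json())  # "ok" on success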
@@ -1,18 +1,103 @@
|
||||
from datetime import datetime as dt
|
||||
|
||||
from django.db.models import Q
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
||||
from rest_framework.views import APIView
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from .models import Alert
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .serializers import AlertSerializer
|
||||
from .models import Alert, AlertTemplate
|
||||
from .serializers import (
|
||||
AlertSerializer,
|
||||
AlertTemplateRelationSerializer,
|
||||
AlertTemplateSerializer,
|
||||
)
|
||||
from .tasks import cache_agents_alert_template
|
||||
|
||||
|
||||
class GetAddAlerts(APIView):
|
||||
def get(self, request):
|
||||
alerts = Alert.objects.all()
|
||||
def patch(self, request):
|
||||
|
||||
# top 10 alerts for dashboard icon
|
||||
if "top" in request.data.keys():
|
||||
alerts = Alert.objects.filter(
|
||||
resolved=False, snoozed=False, hidden=False
|
||||
).order_by("alert_time")[: int(request.data["top"])]
|
||||
count = Alert.objects.filter(
|
||||
resolved=False, snoozed=False, hidden=False
|
||||
).count()
|
||||
return Response(
|
||||
{
|
||||
"alerts_count": count,
|
||||
"alerts": AlertSerializer(alerts, many=True).data,
|
||||
}
|
||||
)
|
||||
|
||||
elif any(
|
||||
key
|
||||
in [
|
||||
"timeFilter",
|
||||
"clientFilter",
|
||||
"severityFilter",
|
||||
"resolvedFilter",
|
||||
"snoozedFilter",
|
||||
]
|
||||
for key in request.data.keys()
|
||||
):
|
||||
clientFilter = Q()
|
||||
severityFilter = Q()
|
||||
timeFilter = Q()
|
||||
resolvedFilter = Q()
|
||||
snoozedFilter = Q()
|
||||
|
||||
if (
|
||||
"snoozedFilter" in request.data.keys()
|
||||
and not request.data["snoozedFilter"]
|
||||
):
|
||||
snoozedFilter = Q(snoozed=request.data["snoozedFilter"])
|
||||
|
||||
if (
|
||||
"resolvedFilter" in request.data.keys()
|
||||
and not request.data["resolvedFilter"]
|
||||
):
|
||||
resolvedFilter = Q(resolved=request.data["resolvedFilter"])
|
||||
|
||||
if "clientFilter" in request.data.keys():
|
||||
from agents.models import Agent
|
||||
from clients.models import Client
|
||||
|
||||
clients = Client.objects.filter(
|
||||
pk__in=request.data["clientFilter"]
|
||||
).values_list("id")
|
||||
agents = Agent.objects.filter(site__client_id__in=clients).values_list(
|
||||
"id"
|
||||
)
|
||||
|
||||
clientFilter = Q(agent__in=agents)
|
||||
|
||||
if "severityFilter" in request.data.keys():
|
||||
severityFilter = Q(severity__in=request.data["severityFilter"])
|
||||
|
||||
if "timeFilter" in request.data.keys():
|
||||
timeFilter = Q(
|
||||
alert_time__lte=djangotime.make_aware(dt.today()),
|
||||
alert_time__gt=djangotime.make_aware(dt.today())
|
||||
- djangotime.timedelta(days=int(request.data["timeFilter"])),
|
||||
)
|
||||
|
||||
alerts = (
|
||||
Alert.objects.filter(clientFilter)
|
||||
.filter(severityFilter)
|
||||
.filter(resolvedFilter)
|
||||
.filter(snoozedFilter)
|
||||
.filter(timeFilter)
|
||||
)
|
||||
return Response(AlertSerializer(alerts, many=True).data)
|
||||
|
||||
else:
|
||||
alerts = Alert.objects.all()
|
||||
return Response(AlertSerializer(alerts, many=True).data)
|
||||
|
||||
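The filter branch above works because an empty Q() adds no conditions, so only the filters the client actually sent narrow the queryset; a minimal illustration (the severity values are examples):

from django.db.models import Q
from alerts.models import Alert

qs = Alert.objects.filter(Q()).filter(Q(severity__in=["error", "warning"]))
# equivalent to Alert.objects.filter(severity__in=["error", "warning"])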
def post(self, request):
|
||||
@@ -32,7 +117,40 @@ class GetUpdateDeleteAlert(APIView):
|
||||
def put(self, request, pk):
|
||||
alert = get_object_or_404(Alert, pk=pk)
|
||||
|
||||
serializer = AlertSerializer(instance=alert, data=request.data, partial=True)
|
||||
data = request.data
|
||||
|
||||
if "type" in data.keys():
|
||||
if data["type"] == "resolve":
|
||||
data = {
|
||||
"resolved": True,
|
||||
"resolved_on": djangotime.now(),
|
||||
"snoozed": False,
|
||||
}
|
||||
|
||||
# unable to set snooze_until to None in the serializer
|
||||
alert.snooze_until = None
|
||||
alert.save()
|
||||
elif data["type"] == "snooze":
|
||||
if "snooze_days" in data.keys():
|
||||
data = {
|
||||
"snoozed": True,
|
||||
"snooze_until": djangotime.now()
|
||||
+ djangotime.timedelta(days=int(data["snooze_days"])),
|
||||
}
|
||||
else:
|
||||
return notify_error(
|
||||
"Missing 'snoozed_days' when trying to snooze alert"
|
||||
)
|
||||
elif data["type"] == "unsnooze":
|
||||
data = {"snoozed": False}
|
||||
|
||||
# unable to set snooze_until to None in the serializer
|
||||
alert.snooze_until = None
|
||||
alert.save()
|
||||
else:
|
||||
return notify_error("There was an error in the request data")
|
||||
|
||||
serializer = AlertSerializer(instance=alert, data=data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
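For reference, the three request shapes the put handler above accepts (values are illustrative):

resolve_payload = {"type": "resolve"}
snooze_payload = {"type": "snooze", "snooze_days": 7}
unsnooze_payload = {"type": "unsnooze"}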
@@ -42,3 +160,77 @@ class GetUpdateDeleteAlert(APIView):
|
||||
Alert.objects.get(pk=pk).delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class BulkAlerts(APIView):
|
||||
def post(self, request):
|
||||
if request.data["bulk_action"] == "resolve":
|
||||
Alert.objects.filter(id__in=request.data["alerts"]).update(
|
||||
resolved=True,
|
||||
resolved_on=djangotime.now(),
|
||||
snoozed=False,
|
||||
snooze_until=None,
|
||||
)
|
||||
return Response("ok")
|
||||
elif request.data["bulk_action"] == "snooze":
|
||||
if "snooze_days" in request.data.keys():
|
||||
Alert.objects.filter(id__in=request.data["alerts"]).update(
|
||||
snoozed=True,
|
||||
snooze_until=djangotime.now()
|
||||
+ djangotime.timedelta(days=int(request.data["snooze_days"])),
|
||||
)
|
||||
return Response("ok")
|
||||
|
||||
return notify_error("The request was invalid")
|
||||
|
||||
|
||||
class GetAddAlertTemplates(APIView):
|
||||
def get(self, request):
|
||||
alert_templates = AlertTemplate.objects.all()
|
||||
|
||||
return Response(AlertTemplateSerializer(alert_templates, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
serializer = AlertTemplateSerializer(data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
# cache alert_template value on agents
|
||||
cache_agents_alert_template.delay()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class GetUpdateDeleteAlertTemplate(APIView):
|
||||
def get(self, request, pk):
|
||||
alert_template = get_object_or_404(AlertTemplate, pk=pk)
|
||||
|
||||
return Response(AlertTemplateSerializer(alert_template).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
alert_template = get_object_or_404(AlertTemplate, pk=pk)
|
||||
|
||||
serializer = AlertTemplateSerializer(
|
||||
instance=alert_template, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
# cache alert_template value on agents
|
||||
cache_agents_alert_template.delay()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def delete(self, request, pk):
|
||||
get_object_or_404(AlertTemplate, pk=pk).delete()
|
||||
|
||||
# cache alert_template value on agents
|
||||
cache_agents_alert_template.delay()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class RelatedAlertTemplate(APIView):
|
||||
def get(self, request, pk):
|
||||
alert_template = get_object_or_404(AlertTemplate, pk=pk)
|
||||
return Response(AlertTemplateRelationSerializer(alert_template).data)
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
import os
|
||||
import json
|
||||
import os
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.conf import settings
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from unittest.mock import patch
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker
|
||||
from itertools import cycle
|
||||
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
|
||||
class TestAPIv3(TacticalTestCase):
|
||||
@@ -17,8 +18,44 @@ class TestAPIv3(TacticalTestCase):
|
||||
def test_get_checks(self):
|
||||
url = f"/api/v3/{self.agent.agent_id}/checkrunner/"
|
||||
|
||||
# add a check
|
||||
check1 = baker.make_recipe("checks.ping_check", agent=self.agent)
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data["check_interval"], self.agent.check_interval) # type: ignore
|
||||
self.assertEqual(len(r.data["checks"]), 1) # type: ignore
|
||||
|
||||
# override check run interval
|
||||
check2 = baker.make_recipe(
|
||||
"checks.ping_check", agent=self.agent, run_interval=20
|
||||
)
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data["check_interval"], 20) # type: ignore
|
||||
self.assertEqual(len(r.data["checks"]), 2) # type: ignore
|
||||
|
||||
# Set last_run on both checks; the endpoint should now return an empty list
|
||||
check1.last_run = djangotime.now()
|
||||
check1.save()
|
||||
check2.last_run = djangotime.now()
|
||||
check2.save()
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data["check_interval"], 20) # type: ignore
|
||||
self.assertFalse(r.data["checks"]) # type: ignore
|
||||
|
||||
# set last_run greater than interval
|
||||
check1.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
|
||||
check1.save()
|
||||
check2.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
|
||||
check2.save()
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data["check_interval"], 20) # type: ignore
|
||||
self.assertEqual(len(r.data["checks"]), 2) # type: ignore
|
||||
|
||||
url = "/api/v3/Maj34ACb324j234asdj2n34kASDjh34-DESKTOPTEST123/checkrunner/"
|
||||
r = self.client.get(url)
|
||||
@@ -26,21 +63,6 @@ class TestAPIv3(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_mesh_info(self):
|
||||
url = f"/api/v3/{self.agent.pk}/meshinfo/"
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_winupdater(self):
|
||||
url = f"/api/v3/{self.agent.agent_id}/winupdater/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_sysinfo(self):
|
||||
# TODO replace this with golang wmi sample data
|
||||
|
||||
@@ -59,23 +81,6 @@ class TestAPIv3(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_hello_patch(self):
|
||||
url = "/api/v3/hello/"
|
||||
payload = {
|
||||
"agent_id": self.agent.agent_id,
|
||||
"logged_in_username": "None",
|
||||
"disks": [],
|
||||
}
|
||||
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
payload["logged_in_username"] = "Bob"
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_checkrunner_interval(self):
|
||||
url = f"/api/v3/{self.agent.agent_id}/checkinterval/"
|
||||
r = self.client.get(url, format="json")
|
||||
@@ -84,3 +89,117 @@ class TestAPIv3(TacticalTestCase):
|
||||
r.json(),
|
||||
{"agent": self.agent.pk, "check_interval": self.agent.check_interval},
|
||||
)
|
||||
|
||||
# add check to agent with check interval set
|
||||
check = baker.make_recipe(
|
||||
"checks.ping_check", agent=self.agent, run_interval=30
|
||||
)
|
||||
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(
|
||||
r.json(),
|
||||
{"agent": self.agent.pk, "check_interval": 30},
|
||||
)
|
||||
|
||||
# minimum check run interval is 15 seconds
|
||||
check = baker.make_recipe("checks.ping_check", agent=self.agent, run_interval=5)
|
||||
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(
|
||||
r.json(),
|
||||
{"agent": self.agent.pk, "check_interval": 15},
|
||||
)
|
||||
|
||||
def test_run_checks(self):
|
||||
# force run all checks regardless of interval
|
||||
agent = baker.make_recipe("agents.online_agent")
|
||||
baker.make_recipe("checks.ping_check", agent=agent)
|
||||
baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
baker.make_recipe("checks.cpuload_check", agent=agent)
|
||||
baker.make_recipe("checks.memory_check", agent=agent)
|
||||
baker.make_recipe("checks.eventlog_check", agent=agent)
|
||||
for _ in range(10):
|
||||
baker.make_recipe("checks.script_check", agent=agent)
|
||||
|
||||
url = f"/api/v3/{agent.agent_id}/runchecks/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.json()["agent"], agent.pk)
|
||||
self.assertIsInstance(r.json()["check_interval"], int)
|
||||
self.assertEqual(len(r.json()["checks"]), 15)
|
||||
|
||||
def test_checkin_patch(self):
|
||||
from logs.models import PendingAction
|
||||
|
||||
url = "/api/v3/checkin/"
|
||||
agent_updated = baker.make_recipe("agents.agent", version="1.3.0")
|
||||
PendingAction.objects.create(
|
||||
agent=agent_updated,
|
||||
action_type="agentupdate",
|
||||
details={
|
||||
"url": agent_updated.winagent_dl,
|
||||
"version": agent_updated.version,
|
||||
"inno": agent_updated.win_inno_exe,
|
||||
},
|
||||
)
|
||||
action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
|
||||
self.assertEqual(action.status, "pending")
|
||||
|
||||
# test agent failed to update and still on same version
|
||||
payload = {
|
||||
"func": "hello",
|
||||
"agent_id": agent_updated.agent_id,
|
||||
"version": "1.3.0",
|
||||
}
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
|
||||
self.assertEqual(action.status, "pending")
|
||||
|
||||
# test agent successful update
|
||||
payload["version"] = settings.LATEST_AGENT_VER
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
|
||||
self.assertEqual(action.status, "completed")
|
||||
action.delete()
|
||||
|
||||
@patch("apiv3.views.reload_nats")
|
||||
def test_agent_recovery(self, reload_nats):
|
||||
reload_nats.return_value = "ok"
|
||||
r = self.client.get("/api/v3/34jahsdkjasncASDjhg2b3j4r/recover/")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
agent = baker.make_recipe("agents.online_agent")
|
||||
url = f"/api/v3/{agent.agent_id}/recovery/"
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.json(), {"mode": "pass", "shellcmd": ""})
|
||||
reload_nats.assert_not_called()
|
||||
|
||||
baker.make("agents.RecoveryAction", agent=agent, mode="mesh")
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.json(), {"mode": "mesh", "shellcmd": ""})
|
||||
reload_nats.assert_not_called()
|
||||
|
||||
baker.make(
|
||||
"agents.RecoveryAction",
|
||||
agent=agent,
|
||||
mode="command",
|
||||
command="shutdown /r /t 5 /f",
|
||||
)
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(
|
||||
r.json(), {"mode": "command", "shellcmd": "shutdown /r /t 5 /f"}
|
||||
)
|
||||
reload_nats.assert_not_called()
|
||||
|
||||
baker.make("agents.RecoveryAction", agent=agent, mode="rpc")
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.json(), {"mode": "rpc", "shellcmd": ""})
|
||||
reload_nats.assert_called_once()
|
||||
|
||||
@@ -1,19 +1,23 @@
from django.urls import path

from . import views

urlpatterns = [
    path("checkin/", views.CheckIn.as_view()),
    path("hello/", views.Hello.as_view()),
    path("checkrunner/", views.CheckRunner.as_view()),
    path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
    path("<str:agentid>/runchecks/", views.RunChecks.as_view()),
    path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()),
    path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
    path("<int:pk>/meshinfo/", views.MeshInfo.as_view()),
    path("meshexe/", views.MeshExe.as_view()),
    path("sysinfo/", views.SysInfo.as_view()),
    path("newagent/", views.NewAgent.as_view()),
    path("winupdater/", views.WinUpdater.as_view()),
    path("<str:agentid>/winupdater/", views.WinUpdater.as_view()),
    path("software/", views.Software.as_view()),
    path("installer/", views.Installer.as_view()),
    path("checkin/", views.CheckIn.as_view()),
    path("syncmesh/", views.SyncMeshNodeID.as_view()),
    path("choco/", views.Choco.as_view()),
    path("winupdates/", views.WinUpdates.as_view()),
    path("superseded/", views.SupersededWinUpdate.as_view()),
    path("<int:pk>/chocoresult/", views.ChocoResult.as_view()),
    path("<str:agentid>/recovery/", views.AgentRecovery.as_view()),
]

@@ -1,87 +1,72 @@
|
||||
import asyncio
|
||||
import os
|
||||
import requests
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
import time
|
||||
|
||||
from django.conf import settings
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from django.http import HttpResponse
|
||||
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.authtoken.models import Token
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.authtoken.models import Token
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from autotasks.models import AutomatedTask
|
||||
from accounts.models import User
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from software.models import InstalledSoftware
|
||||
from checks.serializers import CheckRunnerGetSerializerV3
|
||||
from agents.models import Agent
|
||||
from agents.serializers import WinAgentSerializer
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
|
||||
from winupdate.serializers import ApprovedUpdateSerializer
|
||||
|
||||
from agents.tasks import (
|
||||
agent_recovery_email_task,
|
||||
agent_recovery_sms_task,
|
||||
)
|
||||
from checks.models import Check
|
||||
from checks.serializers import CheckRunnerGetSerializer
|
||||
from checks.utils import bytes2human
|
||||
from tacticalrmm.utils import notify_error, reload_nats, filter_software, SoftwareList
|
||||
from logs.models import PendingAction
|
||||
from software.models import InstalledSoftware
|
||||
from tacticalrmm.utils import SoftwareList, filter_software, notify_error, reload_nats
|
||||
from winupdate.models import WinUpdate, WinUpdatePolicy
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class CheckIn(APIView):
|
||||
"""
|
||||
The agent's checkin endpoint
|
||||
patch: called every 45 to 110 seconds, handles agent updates and recovery
|
||||
put: called every 5 to 10 minutes, handles basic system info
|
||||
post: called once on windows service startup
|
||||
"""
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def patch(self, request):
|
||||
from alerts.models import Alert
|
||||
|
||||
updated = False
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
if pyver.parse(request.data["version"]) > pyver.parse(
|
||||
agent.version
|
||||
) or pyver.parse(request.data["version"]) == pyver.parse(
|
||||
settings.LATEST_AGENT_VER
|
||||
):
|
||||
updated = True
|
||||
agent.version = request.data["version"]
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save(update_fields=["version", "last_seen"])
|
||||
|
||||
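# Side note (not part of this diff): the comparison above relies on
# packaging.version ordering rather than string comparison, e.g.:
#   pyver.parse("1.4.10") > pyver.parse("1.4.9")  -> True
#   "1.4.10" > "1.4.9"                            -> False (string compare)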
if agent.agentoutages.exists() and agent.agentoutages.last().is_active:
|
||||
last_outage = agent.agentoutages.last()
|
||||
last_outage.recovery_time = djangotime.now()
|
||||
last_outage.save(update_fields=["recovery_time"])
|
||||
|
||||
if agent.overdue_email_alert:
|
||||
agent_recovery_email_task.delay(pk=last_outage.pk)
|
||||
if agent.overdue_text_alert:
|
||||
agent_recovery_sms_task.delay(pk=last_outage.pk)
|
||||
|
||||
recovery = agent.recoveryactions.filter(last_run=None).last()
|
||||
if recovery is not None:
|
||||
recovery.last_run = djangotime.now()
|
||||
recovery.save(update_fields=["last_run"])
|
||||
return Response(recovery.send())
|
||||
|
||||
# handle agent update
|
||||
if agent.pendingactions.filter(
|
||||
# change agent update pending status to completed if agent has just updated
|
||||
if (
|
||||
updated
|
||||
and agent.pendingactions.filter( # type: ignore
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists():
|
||||
update = agent.pendingactions.filter(
|
||||
).exists()
|
||||
):
|
||||
agent.pendingactions.filter( # type: ignore
|
||||
action_type="agentupdate", status="pending"
|
||||
).last()
|
||||
update.status = "completed"
|
||||
update.save(update_fields=["status"])
|
||||
return Response(update.details)
|
||||
).update(status="completed")
|
||||
|
||||
# handles any alerting actions
|
||||
if Alert.objects.filter(agent=agent, resolved=False).exists():
|
||||
Alert.handle_alert_resolve(agent)
|
||||
|
||||
# get any pending actions
|
||||
if agent.pendingactions.filter(status="pending").exists():
|
||||
if agent.pendingactions.filter(status="pending").exists(): # type: ignore
|
||||
agent.handle_pending_actions()
|
||||
|
||||
return Response("ok")
|
||||
@@ -89,20 +74,13 @@ class CheckIn(APIView):
|
||||
def put(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
if "disks" in request.data.keys():
|
||||
if request.data["func"] == "disks":
|
||||
disks = request.data["disks"]
|
||||
new = []
|
||||
# python agent
|
||||
if isinstance(disks, dict):
|
||||
for k, v in disks.items():
|
||||
new.append(v)
|
||||
else:
|
||||
# golang agent
|
||||
for disk in disks:
|
||||
tmp = {}
|
||||
for k, v in disk.items():
|
||||
for _, _ in disk.items():
|
||||
tmp["device"] = disk["device"]
|
||||
tmp["fstype"] = disk["fstype"]
|
||||
tmp["total"] = bytes2human(disk["total"])
|
||||
@@ -111,128 +89,224 @@ class CheckIn(APIView):
|
||||
tmp["percent"] = int(disk["percent"])
|
||||
new.append(tmp)
|
||||
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(disks=new)
|
||||
return Response("ok")
|
||||
|
||||
if "logged_in_username" in request.data.keys():
|
||||
if request.data["func"] == "loggedonuser":
|
||||
if request.data["logged_in_username"] != "None":
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_logged_in_user=request.data["logged_in_username"])
|
||||
return Response("ok")
|
||||
|
||||
if request.data["func"] == "software":
|
||||
raw: SoftwareList = request.data["software"]
|
||||
if not isinstance(raw, list):
|
||||
return notify_error("err")
|
||||
|
||||
sw = filter_software(raw)
|
||||
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
||||
InstalledSoftware(agent=agent, software=sw).save()
|
||||
else:
|
||||
s = agent.installedsoftware_set.first() # type: ignore
|
||||
s.software = sw
|
||||
s.save(update_fields=["software"])
|
||||
|
||||
return Response("ok")
|
||||
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response("ok")
|
||||
|
||||
# called once during tacticalagent windows service startup
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
if not agent.choco_installed:
|
||||
asyncio.run(agent.nats_cmd({"func": "installchoco"}, wait=False))
|
||||
|
||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_seen=djangotime.now())
|
||||
time.sleep(0.5)
|
||||
asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False))
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class Hello(APIView):
|
||||
#### DEPRECATED, for agents <= 1.1.9 ####
|
||||
"""
|
||||
The agent's checkin endpoint
|
||||
patch: called every 30 to 120 seconds
|
||||
post: called on agent windows service startup
|
||||
"""
|
||||
|
||||
class SyncMeshNodeID(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
if agent.mesh_node_id != request.data["nodeid"]:
|
||||
agent.mesh_node_id = request.data["nodeid"]
|
||||
agent.save(update_fields=["mesh_node_id"])
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class Choco(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
agent.choco_installed = request.data["installed"]
|
||||
agent.save(update_fields=["choco_installed"])
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class WinUpdates(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def put(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
reboot_policy: str = agent.get_patch_policy().reboot_after_install
|
||||
reboot = False
|
||||
|
||||
if reboot_policy == "always":
|
||||
reboot = True
|
||||
|
||||
if request.data["needs_reboot"]:
|
||||
if reboot_policy == "required":
|
||||
reboot = True
|
||||
elif reboot_policy == "never":
|
||||
agent.needs_reboot = True
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
|
||||
if reboot:
|
||||
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
|
||||
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
|
||||
def patch(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
disks = request.data["disks"]
|
||||
new = []
|
||||
# python agent
|
||||
if isinstance(disks, dict):
|
||||
for k, v in disks.items():
|
||||
new.append(v)
|
||||
else:
|
||||
# golang agent
|
||||
for disk in disks:
|
||||
tmp = {}
|
||||
for k, v in disk.items():
|
||||
tmp["device"] = disk["device"]
|
||||
tmp["fstype"] = disk["fstype"]
|
||||
tmp["total"] = bytes2human(disk["total"])
|
||||
tmp["used"] = bytes2human(disk["used"])
|
||||
tmp["free"] = bytes2human(disk["free"])
|
||||
tmp["percent"] = int(disk["percent"])
|
||||
new.append(tmp)
|
||||
|
||||
if request.data["logged_in_username"] == "None":
|
||||
serializer.save(last_seen=djangotime.now(), disks=new)
|
||||
else:
|
||||
serializer.save(
|
||||
last_seen=djangotime.now(),
|
||||
disks=new,
|
||||
last_logged_in_user=request.data["logged_in_username"],
|
||||
u = agent.winupdates.filter(guid=request.data["guid"]).last() # type: ignore
|
||||
success: bool = request.data["success"]
|
||||
if success:
|
||||
u.result = "success"
|
||||
u.downloaded = True
|
||||
u.installed = True
|
||||
u.date_installed = djangotime.now()
|
||||
u.save(
|
||||
update_fields=[
|
||||
"result",
|
||||
"downloaded",
|
||||
"installed",
|
||||
"date_installed",
|
||||
]
|
||||
)
|
||||
else:
|
||||
u.result = "failed"
|
||||
u.save(update_fields=["result"])
|
||||
|
||||
if agent.agentoutages.exists() and agent.agentoutages.last().is_active:
|
||||
last_outage = agent.agentoutages.last()
|
||||
last_outage.recovery_time = djangotime.now()
|
||||
last_outage.save(update_fields=["recovery_time"])
|
||||
|
||||
if agent.overdue_email_alert:
|
||||
agent_recovery_email_task.delay(pk=last_outage.pk)
|
||||
if agent.overdue_text_alert:
|
||||
agent_recovery_sms_task.delay(pk=last_outage.pk)
|
||||
|
||||
recovery = agent.recoveryactions.filter(last_run=None).last()
|
||||
if recovery is not None:
|
||||
recovery.last_run = djangotime.now()
|
||||
recovery.save(update_fields=["last_run"])
|
||||
return Response(recovery.send())
|
||||
|
||||
# handle agent update
|
||||
if agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists():
|
||||
update = agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).last()
|
||||
update.status = "completed"
|
||||
update.save(update_fields=["status"])
|
||||
return Response(update.details)
|
||||
|
||||
# get any pending actions
|
||||
if agent.pendingactions.filter(status="pending").exists():
|
||||
agent.handle_pending_actions()
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
updates = request.data["wua_updates"]
|
||||
for update in updates:
|
||||
if agent.winupdates.filter(guid=update["guid"]).exists(): # type: ignore
|
||||
u = agent.winupdates.filter(guid=update["guid"]).last() # type: ignore
|
||||
u.downloaded = update["downloaded"]
|
||||
u.installed = update["installed"]
|
||||
u.save(update_fields=["downloaded", "installed"])
|
||||
else:
|
||||
try:
|
||||
kb = "KB" + update["kb_article_ids"][0]
|
||||
except Exception:  # skip updates without a kb article id
|
||||
continue
|
||||
|
||||
WinUpdate(
|
||||
agent=agent,
|
||||
guid=update["guid"],
|
||||
kb=kb,
|
||||
title=update["title"],
|
||||
installed=update["installed"],
|
||||
downloaded=update["downloaded"],
|
||||
description=update["description"],
|
||||
severity=update["severity"],
|
||||
categories=update["categories"],
|
||||
category_ids=update["category_ids"],
|
||||
kb_article_ids=update["kb_article_ids"],
|
||||
more_info_urls=update["more_info_urls"],
|
||||
support_url=update["support_url"],
|
||||
revision_number=update["revision_number"],
|
||||
).save()
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
|
||||
# more superseded updates cleanup
|
||||
if pyver.parse(agent.version) <= pyver.parse("1.4.2"):
|
||||
for u in agent.winupdates.filter( # type: ignore
|
||||
date_installed__isnull=True, result="failed"
|
||||
).exclude(installed=True):
|
||||
u.delete()
|
||||
|
||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_seen=djangotime.now())
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class CheckRunner(APIView):
|
||||
"""
|
||||
For the windows golang agent
|
||||
"""
|
||||
class SupersededWinUpdate(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
updates = agent.winupdates.filter(guid=request.data["guid"]) # type: ignore
|
||||
for u in updates:
|
||||
u.delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class RunChecks(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
|
||||
|
||||
ret = {
|
||||
"agent": agent.pk,
|
||||
"check_interval": agent.check_interval,
|
||||
"checks": CheckRunnerGetSerializerV3(checks, many=True).data,
|
||||
"checks": CheckRunnerGetSerializer(checks, many=True).data,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
|
||||
class CheckRunner(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
checks = agent.agentchecks.filter(overriden_by_policy=False) # type: ignore
|
||||
|
||||
run_list = [
|
||||
check
|
||||
for check in checks
|
||||
# always run if check hasn't run yet
|
||||
if not check.last_run
|
||||
# if a check interval is set, see if the correct number of seconds has passed
|
||||
or (
|
||||
check.run_interval
|
||||
and (
|
||||
check.last_run
|
||||
< djangotime.now()
|
||||
- djangotime.timedelta(seconds=check.run_interval)
|
||||
)
|
||||
# if check interval isn't set, make sure the agent's check interval has passed before running
|
||||
)
|
||||
or (
|
||||
check.last_run
|
||||
< djangotime.now() - djangotime.timedelta(seconds=agent.check_interval)
|
||||
)
|
||||
]
|
||||
ret = {
|
||||
"agent": agent.pk,
|
||||
"check_interval": agent.check_run_interval(),
|
||||
"checks": CheckRunnerGetSerializer(run_list, many=True).data,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
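The run_list comprehension above is easier to read as a plain function; this restatement is not part of the diff, it just mirrors the same three conditions:

from django.utils import timezone as djangotime

def check_is_due(check, agent) -> bool:
    if not check.last_run:  # never run yet -> always due
        return True
    now = djangotime.now()
    if check.run_interval and check.last_run < now - djangotime.timedelta(
        seconds=check.run_interval
    ):
        return True
    # otherwise fall back to the agent-wide check interval
    return check.last_run < now - djangotime.timedelta(seconds=agent.check_interval)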
@@ -251,14 +325,13 @@ class CheckRunnerInterval(APIView):
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
return Response({"agent": agent.pk, "check_interval": agent.check_interval})
|
||||
|
||||
return Response(
|
||||
{"agent": agent.pk, "check_interval": agent.check_run_interval()}
|
||||
)
|
||||
|
||||
|
||||
class TaskRunner(APIView):
|
||||
"""
|
||||
For the windows golang agent
|
||||
"""
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
@@ -268,6 +341,7 @@ class TaskRunner(APIView):
|
||||
return Response(TaskGOGetSerializer(task).data)
|
||||
|
||||
def patch(self, request, pk, agentid):
|
||||
from alerts.models import Alert
|
||||
from logs.models import AuditLog
|
||||
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
@@ -279,7 +353,18 @@ class TaskRunner(APIView):
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_run=djangotime.now())
|
||||
|
||||
new_task = AutomatedTask.objects.get(pk=task.pk)
|
||||
status = "failing" if task.retcode != 0 else "passing"
|
||||
|
||||
new_task: AutomatedTask = AutomatedTask.objects.get(pk=task.pk)
|
||||
new_task.status = status
|
||||
new_task.save()
|
||||
|
||||
if status == "passing":
|
||||
if Alert.objects.filter(assigned_task=new_task, resolved=False).exists():
|
||||
Alert.handle_alert_resolve(new_task)
|
||||
else:
|
||||
Alert.handle_alert_failure(new_task)
|
||||
|
||||
AuditLog.objects.create(
|
||||
username=agent.hostname,
|
||||
agent=agent.hostname,
|
||||
@@ -292,74 +377,6 @@ class TaskRunner(APIView):
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class WinUpdater(APIView):
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
agent.delete_superseded_updates()
|
||||
patches = agent.winupdates.filter(action="approve").exclude(installed=True)
|
||||
return Response(ApprovedUpdateSerializer(patches, many=True).data)
|
||||
|
||||
# agent sends patch results as it's installing them
|
||||
def patch(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
kb = request.data["kb"]
|
||||
results = request.data["results"]
|
||||
update = agent.winupdates.get(kb=kb)
|
||||
|
||||
if results == "error" or results == "failed":
|
||||
update.result = results
|
||||
update.save(update_fields=["result"])
|
||||
elif results == "success":
|
||||
update.result = "success"
|
||||
update.downloaded = True
|
||||
update.installed = True
|
||||
update.date_installed = djangotime.now()
|
||||
update.save(
|
||||
update_fields=[
|
||||
"result",
|
||||
"downloaded",
|
||||
"installed",
|
||||
"date_installed",
|
||||
]
|
||||
)
|
||||
elif results == "alreadyinstalled":
|
||||
update.result = "success"
|
||||
update.downloaded = True
|
||||
update.installed = True
|
||||
update.save(update_fields=["result", "downloaded", "installed"])
|
||||
|
||||
return Response("ok")
|
||||
|
||||
# agent calls this after it's finished installing all patches
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
reboot_policy = agent.get_patch_policy().reboot_after_install
|
||||
reboot = False
|
||||
|
||||
if reboot_policy == "always":
|
||||
reboot = True
|
||||
|
||||
if request.data["reboot"]:
|
||||
if reboot_policy == "required":
|
||||
reboot = True
|
||||
elif reboot_policy == "never":
|
||||
agent.needs_reboot = True
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
|
||||
if reboot:
|
||||
if agent.has_nats:
|
||||
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
|
||||
logger.info(
|
||||
f"{agent.hostname} is rebooting after updates were installed."
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class SysInfo(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
@@ -375,29 +392,6 @@ class SysInfo(APIView):
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class MeshInfo(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
return Response(agent.mesh_node_id)
|
||||
|
||||
def patch(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
|
||||
if "nodeidhex" in request.data:
|
||||
# agent <= 1.1.0
|
||||
nodeid = request.data["nodeidhex"]
|
||||
else:
|
||||
# agent >= 1.1.1
|
||||
nodeid = request.data["nodeid"]
|
||||
|
||||
agent.mesh_node_id = nodeid
|
||||
agent.save(update_fields=["mesh_node_id"])
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class MeshExe(APIView):
|
||||
""" Sends the mesh exe to the installer """
|
||||
|
||||
@@ -447,10 +441,10 @@ class NewAgent(APIView):
|
||||
agent.salt_id = f"{agent.hostname}-{agent.pk}"
|
||||
agent.save(update_fields=["salt_id"])
|
||||
|
||||
user = User.objects.create_user(
|
||||
user = User.objects.create_user( # type: ignore
|
||||
username=request.data["agent_id"],
|
||||
agent=agent,
|
||||
password=User.objects.make_random_password(60),
|
||||
password=User.objects.make_random_password(60), # type: ignore
|
||||
)
|
||||
|
||||
token = Token.objects.create(user=user)
|
||||
@@ -462,10 +456,6 @@ class NewAgent(APIView):
|
||||
|
||||
reload_nats()
|
||||
|
||||
# Generate policies for new agent
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
# create agent install audit record
|
||||
AuditLog.objects.create(
|
||||
username=request.user,
|
||||
@@ -499,7 +489,7 @@ class Software(APIView):
|
||||
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
||||
InstalledSoftware(agent=agent, software=sw).save()
|
||||
else:
|
||||
s = agent.installedsoftware_set.first()
|
||||
s = agent.installedsoftware_set.first() # type: ignore
|
||||
s.software = sw
|
||||
s.save(update_fields=["software"])
|
||||
|
||||
@@ -522,3 +512,59 @@ class Installer(APIView):
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class ChocoResult(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def patch(self, request, pk):
|
||||
action = get_object_or_404(PendingAction, pk=pk)
|
||||
results: str = request.data["results"]
|
||||
|
||||
software_name = action.details["name"].lower()
|
||||
success = [
|
||||
"install",
|
||||
"of",
|
||||
software_name,
|
||||
"was",
|
||||
"successful",
|
||||
"installed",
|
||||
]
|
||||
duplicate = [software_name, "already", "installed", "--force", "reinstall"]
|
||||
installed = False
|
||||
|
||||
if all(x in results.lower() for x in success):
|
||||
installed = True
|
||||
elif all(x in results.lower() for x in duplicate):
|
||||
installed = True
|
||||
|
||||
action.details["output"] = results
|
||||
action.details["installed"] = installed
|
||||
action.status = "completed"
|
||||
action.save(update_fields=["details", "status"])
|
||||
return Response("ok")
|
||||
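Quick illustration of the keyword matching above against a typical chocolatey success message (the output text is a made-up sample):

results = "The install of git was successful. 1/1 packages installed."
software_name = "git"
success = ["install", "of", software_name, "was", "successful", "installed"]
print(all(x in results.lower() for x in success))  # True -> marked installed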
|
||||
|
||||
class AgentRecovery(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
recovery = agent.recoveryactions.filter(last_run=None).last() # type: ignore
|
||||
ret = {"mode": "pass", "shellcmd": ""}
|
||||
if recovery is None:
|
||||
return Response(ret)
|
||||
|
||||
recovery.last_run = djangotime.now()
|
||||
recovery.save(update_fields=["last_run"])
|
||||
|
||||
ret["mode"] = recovery.mode
|
||||
|
||||
if recovery.mode == "command":
|
||||
ret["shellcmd"] = recovery.command
|
||||
elif recovery.mode == "rpc":
|
||||
reload_nats()
|
||||
|
||||
return Response(ret)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Generated by Django 3.0.6 on 2020-06-04 17:13
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -0,0 +1,20 @@
# Generated by Django 3.1.4 on 2021-02-12 14:08

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('alerts', '0004_auto_20210212_1408'),
        ('automation', '0006_delete_policyexclusions'),
    ]

    operations = [
        migrations.AddField(
            model_name='policy',
            name='alert_template',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='policies', to='alerts.alerttemplate'),
        ),
    ]
@@ -0,0 +1,30 @@
# Generated by Django 3.1.7 on 2021-03-02 04:15

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0030_agent_offline_time'),
        ('clients', '0009_auto_20210212_1408'),
        ('automation', '0007_policy_alert_template'),
    ]

    operations = [
        migrations.AddField(
            model_name='policy',
            name='excluded_agents',
            field=models.ManyToManyField(blank=True, related_name='policy_exclusions', to='agents.Agent'),
        ),
        migrations.AddField(
            model_name='policy',
            name='excluded_clients',
            field=models.ManyToManyField(blank=True, related_name='policy_exclusions', to='clients.Client'),
        ),
        migrations.AddField(
            model_name='policy',
            name='excluded_sites',
            field=models.ManyToManyField(blank=True, related_name='policy_exclusions', to='clients.Site'),
        ),
    ]
@@ -1,4 +1,5 @@
|
||||
from django.db import models
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CoreSettings
|
||||
from logs.models import BaseAuditModel
|
||||
@@ -9,31 +10,96 @@ class Policy(BaseAuditModel):
|
||||
desc = models.CharField(max_length=255, null=True, blank=True)
|
||||
active = models.BooleanField(default=False)
|
||||
enforced = models.BooleanField(default=False)
|
||||
alert_template = models.ForeignKey(
|
||||
"alerts.AlertTemplate",
|
||||
related_name="policies",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
excluded_sites = models.ManyToManyField(
|
||||
"clients.Site", related_name="policy_exclusions", blank=True
|
||||
)
|
||||
excluded_clients = models.ManyToManyField(
|
||||
"clients.Client", related_name="policy_exclusions", blank=True
|
||||
)
|
||||
excluded_agents = models.ManyToManyField(
|
||||
"agents.Agent", related_name="policy_exclusions", blank=True
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_agent_checks_from_policies_task
|
||||
|
||||
# get old policy if exists
|
||||
old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
|
||||
# generate agent checks only if active and enforced were changed
|
||||
if old_policy:
|
||||
if old_policy.active != self.active or old_policy.enforced != self.enforced:
|
||||
generate_agent_checks_from_policies_task.delay(
|
||||
policypk=self.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
if old_policy.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
|
||||
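# Sketch of what triggers the two background tasks in save() above (the object
# lookups are hypothetical, shown only to illustrate the change detection):
#   policy.alert_template = some_alert_template
#   policy.save()  # alert_template changed -> cache_agents_alert_template.delay()
#   policy.active = True
#   policy.save()  # active/enforced changed -> generate_agent_checks_from_policies_task.delay(policypk=policy.pk, create_tasks=True)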
def delete(self, *args, **kwargs):
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
agents = list(self.related_agents().only("pk").values_list("pk", flat=True))
|
||||
super(BaseAuditModel, self).delete(*args, **kwargs)
|
||||
|
||||
generate_agent_checks_task.delay(agents, create_tasks=True)
|
||||
|
||||
@property
|
||||
def is_default_server_policy(self):
|
||||
return self.default_server_policy.exists()
|
||||
return self.default_server_policy.exists() # type: ignore
|
||||
|
||||
@property
|
||||
def is_default_workstation_policy(self):
|
||||
return self.default_workstation_policy.exists()
|
||||
return self.default_workstation_policy.exists() # type: ignore
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def is_agent_excluded(self, agent):
|
||||
return (
|
||||
agent in self.excluded_agents.all()
|
||||
or agent.site in self.excluded_sites.all()
|
||||
or agent.client in self.excluded_clients.all()
|
||||
)
|
||||
|
||||
def related_agents(self):
|
||||
return self.get_related("server") | self.get_related("workstation")
|
||||
|
||||
def get_related(self, mon_type):
|
||||
explicit_agents = self.agents.filter(monitoring_type=mon_type)
|
||||
explicit_clients = getattr(self, f"{mon_type}_clients").all()
|
||||
explicit_sites = getattr(self, f"{mon_type}_sites").all()
|
||||
explicit_agents = (
|
||||
self.agents.filter(monitoring_type=mon_type) # type: ignore
|
||||
.exclude(
|
||||
pk__in=self.excluded_agents.only("pk").values_list("pk", flat=True)
|
||||
)
|
||||
.exclude(site__in=self.excluded_sites.all())
|
||||
.exclude(site__client__in=self.excluded_clients.all())
|
||||
)
|
||||
|
||||
explicit_clients = getattr(self, f"{mon_type}_clients").exclude(
|
||||
pk__in=self.excluded_clients.all()
|
||||
)
|
||||
explicit_sites = getattr(self, f"{mon_type}_sites").exclude(
|
||||
pk__in=self.excluded_sites.all()
|
||||
)
|
||||
|
||||
filtered_agents_pks = Policy.objects.none()
|
||||
|
||||
filtered_agents_pks |= Agent.objects.filter(
|
||||
site__in=[
|
||||
site for site in explicit_sites if site.client not in explicit_clients
|
||||
site
|
||||
for site in explicit_sites
|
||||
if site.client not in explicit_clients
|
||||
and site.client not in self.excluded_clients.all()
|
||||
],
|
||||
monitoring_type=mon_type,
|
||||
).values_list("pk", flat=True)
|
||||
@@ -57,9 +123,8 @@ class Policy(BaseAuditModel):
|
||||
|
||||
@staticmethod
|
||||
def cascade_policy_tasks(agent):
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
from logs.models import PendingAction
|
||||
|
||||
# List of all tasks to be applied
|
||||
@@ -89,23 +154,39 @@ class Policy(BaseAuditModel):
|
||||
client_policy = client.workstation_policy
|
||||
site_policy = site.workstation_policy
|
||||
|
||||
if agent_policy and agent_policy.active:
|
||||
if (
|
||||
agent_policy
|
||||
and agent_policy.active
|
||||
and not agent_policy.is_agent_excluded(agent)
|
||||
):
|
||||
for task in agent_policy.autotasks.all():
|
||||
if task.pk not in added_task_pks:
|
||||
tasks.append(task)
|
||||
added_task_pks.append(task.pk)
|
||||
if site_policy and site_policy.active:
|
||||
if (
|
||||
site_policy
|
||||
and site_policy.active
|
||||
and not site_policy.is_agent_excluded(agent)
|
||||
):
|
||||
for task in site_policy.autotasks.all():
|
||||
if task.pk not in added_task_pks:
|
||||
tasks.append(task)
|
||||
added_task_pks.append(task.pk)
|
||||
if client_policy and client_policy.active:
|
||||
if (
|
||||
client_policy
|
||||
and client_policy.active
|
||||
and not client_policy.is_agent_excluded(agent)
|
||||
):
|
||||
for task in client_policy.autotasks.all():
|
||||
if task.pk not in added_task_pks:
|
||||
tasks.append(task)
|
||||
added_task_pks.append(task.pk)
|
||||
|
||||
if default_policy and default_policy.active:
|
||||
if (
|
||||
default_policy
|
||||
and default_policy.active
|
||||
and not default_policy.is_agent_excluded(agent)
|
||||
):
|
||||
for task in default_policy.autotasks.all():
|
||||
if task.pk not in added_task_pks:
|
||||
tasks.append(task)
|
||||
@@ -122,7 +203,9 @@ class Policy(BaseAuditModel):
|
||||
delete_win_task_schedule.delay(task.pk)
|
||||
|
||||
# handle matching tasks that haven't synced to the agent yet or are pending deletion because the agent is offline
|
||||
for action in agent.pendingactions.exclude(status="completed"):
|
||||
for action in agent.pendingactions.filter(action_type="taskaction").exclude(
|
||||
status="completed"
|
||||
):
|
||||
task = AutomatedTask.objects.get(pk=action.details["task_id"])
|
||||
if (
|
||||
task.parent_task in agent_tasks_parent_pks
|
||||
@@ -173,7 +256,11 @@ class Policy(BaseAuditModel):
|
||||
enforced_checks = list()
|
||||
policy_checks = list()
|
||||
|
||||
if agent_policy and agent_policy.active:
|
||||
if (
|
||||
agent_policy
|
||||
and agent_policy.active
|
||||
and not agent_policy.is_agent_excluded(agent)
|
||||
):
|
||||
if agent_policy.enforced:
|
||||
for check in agent_policy.policychecks.all():
|
||||
enforced_checks.append(check)
|
||||
@@ -181,7 +268,11 @@ class Policy(BaseAuditModel):
|
||||
for check in agent_policy.policychecks.all():
|
||||
policy_checks.append(check)
|
||||
|
||||
if site_policy and site_policy.active:
|
||||
if (
|
||||
site_policy
|
||||
and site_policy.active
|
||||
and not site_policy.is_agent_excluded(agent)
|
||||
):
|
||||
if site_policy.enforced:
|
||||
for check in site_policy.policychecks.all():
|
||||
enforced_checks.append(check)
|
||||
@@ -189,7 +280,11 @@ class Policy(BaseAuditModel):
|
||||
for check in site_policy.policychecks.all():
|
||||
policy_checks.append(check)
|
||||
|
||||
if client_policy and client_policy.active:
|
||||
if (
|
||||
client_policy
|
||||
and client_policy.active
|
||||
and not client_policy.is_agent_excluded(agent)
|
||||
):
|
||||
if client_policy.enforced:
|
||||
for check in client_policy.policychecks.all():
|
||||
enforced_checks.append(check)
|
||||
@@ -197,7 +292,11 @@ class Policy(BaseAuditModel):
|
||||
for check in client_policy.policychecks.all():
|
||||
policy_checks.append(check)
|
||||
|
||||
if default_policy and default_policy.active:
|
||||
if (
|
||||
default_policy
|
||||
and default_policy.active
|
||||
and not default_policy.is_agent_excluded(agent)
|
||||
):
|
||||
if default_policy.enforced:
|
||||
for check in default_policy.policychecks.all():
|
||||
enforced_checks.append(check)
|
||||
|
||||
@@ -1,20 +1,18 @@
from rest_framework.serializers import (
ModelSerializer,
SerializerMethodField,
StringRelatedField,
ReadOnlyField,
SerializerMethodField,
)

from clients.serializers import ClientSerializer, SiteSerializer
from agents.serializers import AgentHostnameSerializer

from .models import Policy
from agents.models import Agent
from autotasks.models import AutomatedTask
from checks.models import Check
from clients.models import Client, Site
from clients.models import Client
from clients.serializers import ClientSerializer, SiteSerializer
from winupdate.serializers import WinUpdatePolicySerializer

from .models import Policy

class PolicySerializer(ModelSerializer):
class Meta:
@@ -24,15 +22,14 @@ class PolicySerializer(ModelSerializer):

class PolicyTableSerializer(ModelSerializer):

server_clients = ClientSerializer(many=True, read_only=True)
server_sites = SiteSerializer(many=True, read_only=True)
workstation_clients = ClientSerializer(many=True, read_only=True)
workstation_sites = SiteSerializer(many=True, read_only=True)
agents = AgentHostnameSerializer(many=True, read_only=True)
default_server_policy = ReadOnlyField(source="is_default_server_policy")
default_workstation_policy = ReadOnlyField(source="is_default_workstation_policy")
agents_count = SerializerMethodField(read_only=True)
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
alert_template = ReadOnlyField(source="alert_template.id")
excluded_clients = ClientSerializer(many=True)
excluded_sites = SiteSerializer(many=True)
excluded_agents = AgentHostnameSerializer(many=True)

class Meta:
model = Policy
@@ -78,49 +75,16 @@ class PolicyCheckSerializer(ModelSerializer):
"assignedtask",
"text_alert",
"email_alert",
"dashboard_alert",
)
depth = 1

class AutoTasksFieldSerializer(ModelSerializer):
assigned_check = PolicyCheckSerializer(read_only=True)
script = ReadOnlyField(source="script.id")

class Meta:
model = AutomatedTask
fields = ("id", "enabled", "name", "schedule", "assigned_check")
depth = 1

class AutoTaskPolicySerializer(ModelSerializer):

autotasks = AutoTasksFieldSerializer(many=True, read_only=True)

class Meta:
model = Policy
fields = (
"id",
"name",
"autotasks",
)
depth = 2

class RelatedClientPolicySerializer(ModelSerializer):
class Meta:
model = Client
fields = ("workstation_policy", "server_policy")
depth = 1

class RelatedSitePolicySerializer(ModelSerializer):
class Meta:
model = Site
fields = ("workstation_policy", "server_policy")
depth = 1

class RelatedAgentPolicySerializer(ModelSerializer):
class Meta:
model = Agent
fields = ("policy",)
fields = "__all__"
depth = 1
@@ -1,11 +1,12 @@
from automation.models import Policy
from checks.models import Check
from agents.models import Agent

from automation.models import Policy
from autotasks.models import AutomatedTask
from checks.models import Check
from tacticalrmm.celery import app

@app.task
# generates policy checks on agents affected by a policy and optionally generate automated tasks
def generate_agent_checks_from_policies_task(policypk, create_tasks=False):

policy = Policy.objects.get(pk=policypk)
@@ -21,7 +22,7 @@ def generate_agent_checks_from_policies_task(policypk, create_tasks=False):
"pk", "monitoring_type"
)
else:
agents = policy.related_agents()
agents = policy.related_agents().only("pk")

for agent in agents:
agent.generate_checks_from_policies()
@@ -30,6 +31,17 @@ def generate_agent_checks_from_policies_task(policypk, create_tasks=False):

@app.task
# generates policy checks on a list of agents and optionally generate automated tasks
def generate_agent_checks_task(agentpks, create_tasks=False):
for agent in Agent.objects.filter(pk__in=agentpks):
agent.generate_checks_from_policies()

if create_tasks:
agent.generate_tasks_from_policies()
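A usage sketch for the new bulk task (the call site is not part of this hunk; the queryset below is only an example):

```python
# Usage sketch (assumed call site): queue check/task regeneration for a set of agents.
from agents.models import Agent
from automation.tasks import generate_agent_checks_task

agent_pks = list(
    Agent.objects.filter(monitoring_type="server").values_list("pk", flat=True)
)
generate_agent_checks_task.delay(agent_pks, create_tasks=True)
```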
@app.task
# generates policy checks on agent servers or workstations within a certain client or site and optionally generate automated tasks
def generate_agent_checks_by_location_task(location, mon_type, create_tasks=False):

for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
@@ -40,6 +52,7 @@ def generate_agent_checks_by_location_task(location, mon_type, create_tasks=Fals

@app.task
# generates policy checks on all agent servers or workstations and optionally generate automated tasks
def generate_all_agent_checks_task(mon_type, create_tasks=False):
for agent in Agent.objects.filter(monitoring_type=mon_type):
agent.generate_checks_from_policies()
@@ -49,22 +62,31 @@ def generate_all_agent_checks_task(mon_type, create_tasks=False):

@app.task
# deletes a policy managed check from all agents
def delete_policy_check_task(checkpk):

Check.objects.filter(parent_check=checkpk).delete()

@app.task
# updates policy managed check fields on agents
def update_policy_check_fields_task(checkpk):

check = Check.objects.get(pk=checkpk)

Check.objects.filter(parent_check=checkpk).update(
threshold=check.threshold,
warning_threshold=check.warning_threshold,
error_threshold=check.error_threshold,
alert_severity=check.alert_severity,
name=check.name,
run_interval=check.run_interval,
disk=check.disk,
fails_b4_alert=check.fails_b4_alert,
ip=check.ip,
script=check.script,
script_args=check.script_args,
info_return_codes=check.info_return_codes,
warning_return_codes=check.warning_return_codes,
timeout=check.timeout,
pass_if_start_pending=check.pass_if_start_pending,
pass_if_svc_not_exist=check.pass_if_svc_not_exist,
@@ -77,12 +99,15 @@ def update_policy_check_fields_task(checkpk):
event_message=check.event_message,
fail_when=check.fail_when,
search_last_days=check.search_last_days,
number_of_events_b4_alert=check.number_of_events_b4_alert,
email_alert=check.email_alert,
text_alert=check.text_alert,
dashboard_alert=check.dashboard_alert,
)

@app.task
# generates policy tasks on agents affected by a policy
def generate_agent_tasks_from_policies_task(policypk):

policy = Policy.objects.get(pk=policypk)
@@ -98,23 +123,16 @@ def generate_agent_tasks_from_policies_task(policypk):
"pk", "monitoring_type"
)
else:
agents = policy.related_agents()
agents = policy.related_agents().only("pk")

for agent in agents:
agent.generate_tasks_from_policies()

@app.task
def generate_agent_tasks_by_location_task(location, mon_type):

for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
agent.generate_tasks_from_policies()

@app.task
def delete_policy_autotask_task(taskpk):
from autotasks.tasks import delete_win_task_schedule
from autotasks.models import AutomatedTask
from autotasks.tasks import delete_win_task_schedule

for task in AutomatedTask.objects.filter(parent_task=taskpk):
delete_win_task_schedule.delay(task.pk)
@@ -129,13 +147,23 @@ def run_win_policy_autotask_task(task_pks):

@app.task
def update_policy_task_fields_task(taskpk, enabled):
from autotasks.models import AutomatedTask
def update_policy_task_fields_task(taskpk, update_agent=False):
from autotasks.tasks import enable_or_disable_win_task

tasks = AutomatedTask.objects.filter(parent_task=taskpk)
task = AutomatedTask.objects.get(pk=taskpk)

tasks.update(enabled=enabled)
AutomatedTask.objects.filter(parent_task=taskpk).update(
alert_severity=task.alert_severity,
email_alert=task.email_alert,
text_alert=task.text_alert,
dashboard_alert=task.dashboard_alert,
script=task.script,
script_args=task.script_args,
name=task.name,
timeout=task.timeout,
enabled=task.enabled,
)

for autotask in tasks:
enable_or_disable_win_task(autotask.pk, enabled)
if update_agent:
for task in AutomatedTask.objects.filter(parent_task=taskpk):
enable_or_disable_win_task.delay(task.pk, task.enabled)
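The reworked `update_policy_task_fields_task` now copies the parent task's alert and script fields down to every policy-managed copy, and only pushes the enabled state out to agents when `update_agent=True`. The two ways it is queued later in this changeset (see `autotasks/views.py` below):

```python
# Both call patterns appear in autotasks/views.py further down in this diff.
from automation.tasks import update_policy_task_fields_task

# After editing a policy task: sync the field changes to the agent copies.
update_policy_task_fields_task.delay(task.pk)

# After toggling enabled/disabled: also push the new state to each agent.
update_policy_task_fields_task.delay(task.pk, update_agent=True)
```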
File diff suppressed because it is too large
@@ -1,12 +1,13 @@
from django.urls import path

from . import views

urlpatterns = [
path("policies/", views.GetAddPolicies.as_view()),
path("policies/<int:pk>/related/", views.GetRelated.as_view()),
path("related/", views.GetRelated.as_view()),
path("policies/overview/", views.OverviewPolicy.as_view()),
path("policies/<int:pk>/", views.GetUpdateDeletePolicy.as_view()),
path("sync/", views.PolicySync.as_view()),
path("<int:pk>/policychecks/", views.PolicyCheck.as_view()),
path("<int:pk>/policyautomatedtasks/", views.PolicyAutoTask.as_view()),
path("policycheckstatus/<int:check>/check/", views.PolicyCheck.as_view()),
@@ -1,39 +1,28 @@
from django.shortcuts import get_object_or_404

from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework.views import APIView

from .models import Policy
from agents.models import Agent
from clients.models import Client, Site
from checks.models import Check
from autotasks.models import AutomatedTask
from winupdate.models import WinUpdatePolicy

from clients.serializers import ClientSerializer, SiteSerializer
from agents.serializers import AgentHostnameSerializer
from autotasks.models import AutomatedTask
from checks.models import Check
from clients.models import Client
from clients.serializers import ClientSerializer, SiteSerializer
from tacticalrmm.utils import notify_error
from winupdate.models import WinUpdatePolicy
from winupdate.serializers import WinUpdatePolicySerializer

from .models import Policy
from .serializers import (
AutoTasksFieldSerializer,
PolicyCheckSerializer,
PolicyCheckStatusSerializer,
PolicyOverviewSerializer,
PolicySerializer,
PolicyTableSerializer,
PolicyOverviewSerializer,
PolicyCheckStatusSerializer,
PolicyCheckSerializer,
PolicyTaskStatusSerializer,
AutoTaskPolicySerializer,
RelatedClientPolicySerializer,
RelatedSitePolicySerializer,
RelatedAgentPolicySerializer,
)

from .tasks import (
generate_agent_checks_from_policies_task,
generate_agent_checks_by_location_task,
generate_agent_tasks_from_policies_task,
run_win_policy_autotask_task,
)
from .tasks import run_win_policy_autotask_task

class GetAddPolicies(APIView):
@@ -72,39 +61,38 @@ class GetUpdateDeletePolicy(APIView):
def put(self, request, pk):
policy = get_object_or_404(Policy, pk=pk)

old_active = policy.active
old_enforced = policy.enforced

serializer = PolicySerializer(instance=policy, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
saved_policy = serializer.save()

# Generate agent checks only if active and enforced were changed
if saved_policy.active != old_active or saved_policy.enforced != old_enforced:
generate_agent_checks_from_policies_task.delay(
policypk=policy.pk,
create_tasks=(saved_policy.active != old_active),
)
serializer.save()

return Response("ok")

def delete(self, request, pk):
policy = get_object_or_404(Policy, pk=pk)

# delete all managed policy checks off of agents
generate_agent_checks_from_policies_task.delay(policypk=policy.pk)
generate_agent_tasks_from_policies_task.delay(policypk=policy.pk)
policy.delete()
get_object_or_404(Policy, pk=pk).delete()

return Response("ok")

class PolicySync(APIView):
def post(self, request):
if "policy" in request.data.keys():
from automation.tasks import generate_agent_checks_from_policies_task

generate_agent_checks_from_policies_task.delay(
request.data["policy"], create_tasks=True
)
return Response("ok")

else:
return notify_error("The request was invalid")
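The new `PolicySync` endpoint expects a `policy` key containing the policy primary key. A request sketch (the host, URL prefix, and auth header are placeholders; only the `sync/` path and the payload key come from this diff):

```python
# Example client call against the new sync endpoint; host, prefix and token are placeholders.
import requests

requests.post(
    "https://rmm.example.com/automation/sync/",
    json={"policy": 1},  # pk of the policy whose checks/tasks should be regenerated
    headers={"Authorization": "Token <api-token>"},
)
```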
class PolicyAutoTask(APIView):
|
||||
|
||||
# tasks associated with policy
|
||||
def get(self, request, pk):
|
||||
policy = get_object_or_404(Policy, pk=pk)
|
||||
return Response(AutoTaskPolicySerializer(policy).data)
|
||||
tasks = AutomatedTask.objects.filter(policy=pk)
|
||||
return Response(AutoTasksFieldSerializer(tasks, many=True).data)
|
||||
|
||||
# get status of all tasks
|
||||
def patch(self, request, task):
|
||||
@@ -183,205 +171,12 @@ class GetRelated(APIView):
|
||||
).data
|
||||
|
||||
response["agents"] = AgentHostnameSerializer(
|
||||
policy.related_agents(),
|
||||
policy.related_agents().only("pk", "hostname"),
|
||||
many=True,
|
||||
).data
|
||||
|
||||
return Response(response)
|
||||
|
||||
# update agents, clients, sites to policy
|
||||
def post(self, request):
|
||||
|
||||
related_type = request.data["type"]
|
||||
pk = request.data["pk"]
|
||||
|
||||
# workstation policy is set
|
||||
if (
|
||||
"workstation_policy" in request.data
|
||||
and request.data["workstation_policy"] != 0
|
||||
):
|
||||
policy = get_object_or_404(Policy, pk=request.data["workstation_policy"])
|
||||
|
||||
if related_type == "client":
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
|
||||
# Check and see if workstation policy changed and regenerate policies
|
||||
if (
|
||||
not client.workstation_policy
|
||||
or client.workstation_policy
|
||||
and client.workstation_policy.pk != policy.pk
|
||||
):
|
||||
client.workstation_policy = policy
|
||||
client.save()
|
||||
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
if related_type == "site":
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
|
||||
# Check and see if workstation policy changed and regenerate policies
|
||||
if (
|
||||
not site.workstation_policy
|
||||
or site.workstation_policy
|
||||
and site.workstation_policy.pk != policy.pk
|
||||
):
|
||||
site.workstation_policy = policy
|
||||
site.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site_id": site.id},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# server policy is set
|
||||
if "server_policy" in request.data and request.data["server_policy"] != 0:
|
||||
policy = get_object_or_404(Policy, pk=request.data["server_policy"])
|
||||
|
||||
if related_type == "client":
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
|
||||
# Check and see if server policy changed and regenerate policies
|
||||
if (
|
||||
not client.server_policy
|
||||
or client.server_policy
|
||||
and client.server_policy.pk != policy.pk
|
||||
):
|
||||
client.server_policy = policy
|
||||
client.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
if related_type == "site":
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
|
||||
# Check and see if server policy changed and regenerate policies
|
||||
if (
|
||||
not site.server_policy
|
||||
or site.server_policy
|
||||
and site.server_policy.pk != policy.pk
|
||||
):
|
||||
site.server_policy = policy
|
||||
site.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site_id": site.id},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# If workstation policy was cleared
|
||||
if (
|
||||
"workstation_policy" in request.data
|
||||
and request.data["workstation_policy"] == 0
|
||||
):
|
||||
if related_type == "client":
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
|
||||
# Check if workstation policy is set and update it to None
|
||||
if client.workstation_policy:
|
||||
|
||||
client.workstation_policy = None
|
||||
client.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
if related_type == "site":
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
|
||||
# Check if workstation policy is set and update it to None
|
||||
if site.workstation_policy:
|
||||
|
||||
site.workstation_policy = None
|
||||
site.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site_id": site.id},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# server policy cleared
|
||||
if "server_policy" in request.data and request.data["server_policy"] == 0:
|
||||
|
||||
if related_type == "client":
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
|
||||
# Check if server policy is set and update it to None
|
||||
if client.server_policy:
|
||||
|
||||
client.server_policy = None
|
||||
client.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
if related_type == "site":
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
# Check if server policy is set and update it to None
|
||||
if site.server_policy:
|
||||
|
||||
site.server_policy = None
|
||||
site.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site_id": site.pk},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# agent policies
|
||||
if related_type == "agent":
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
|
||||
if "policy" in request.data and request.data["policy"] != 0:
|
||||
policy = Policy.objects.get(pk=request.data["policy"])
|
||||
|
||||
# Check and see if policy changed and regenerate policies
|
||||
if not agent.policy or agent.policy and agent.policy.pk != policy.pk:
|
||||
agent.policy = policy
|
||||
agent.save()
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
else:
|
||||
if agent.policy:
|
||||
agent.policy = None
|
||||
agent.save()
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
# view to get policies set on client, site, and workstation
|
||||
def patch(self, request):
|
||||
related_type = request.data["type"]
|
||||
|
||||
# client, site, or agent pk
|
||||
pk = request.data["pk"]
|
||||
|
||||
if related_type == "agent":
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
return Response(RelatedAgentPolicySerializer(agent).data)
|
||||
|
||||
if related_type == "site":
|
||||
site = Site.objects.get(pk=pk)
|
||||
return Response(RelatedSitePolicySerializer(site).data)
|
||||
|
||||
if related_type == "client":
|
||||
client = Client.objects.get(pk=pk)
|
||||
return Response(RelatedClientPolicySerializer(client).data)
|
||||
|
||||
content = {"error": "Data was submitted incorrectly"}
|
||||
return Response(content, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
class UpdatePatchPolicy(APIView):
|
||||
|
||||
@@ -391,7 +186,7 @@ class UpdatePatchPolicy(APIView):
|
||||
|
||||
serializer = WinUpdatePolicySerializer(data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.policy = policy
|
||||
serializer.policy = policy # type: ignore
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -1,4 +1,5 @@
from django.core.management.base import BaseCommand

from agents.models import Agent
from autotasks.tasks import remove_orphaned_win_tasks

@@ -7,7 +8,7 @@ class Command(BaseCommand):
help = "Checks for orphaned tasks on all agents and removes them"

def handle(self, *args, **kwargs):
agents = Agent.objects.only("pk", "last_seen", "overdue_time")
agents = Agent.objects.only("pk", "last_seen", "overdue_time", "offline_time")
online = [i for i in agents if i.status == "online"]
for agent in online:
remove_orphaned_win_tasks.delay(agent.pk)
@@ -1,8 +1,8 @@
# Generated by Django 3.0.6 on 2020-05-31 01:23

import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models

class Migration(migrations.Migration):

@@ -1,4 +1,5 @@
from django.db import migrations

from tacticalrmm.utils import get_bit_days

DAYS_OF_WEEK = {
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-27 22:21
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0010_migrate_days_to_bitdays'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='alert_severity',
|
||||
field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='None', max_length=30, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,33 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-28 04:17
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0011_automatedtask_alert_severity'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='email_alert',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='email_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='text_alert',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='text_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-29 03:07
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0012_auto_20210128_0417'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='alert_severity',
|
||||
field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=30),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-29 21:11
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0013_auto_20210129_0307'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='dashboard_alert',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-05 17:28
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0014_automatedtask_dashboard_alert'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='resolved_email_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='resolved_text_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-05 21:17
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0015_auto_20210205_1728'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='status',
|
||||
field=models.CharField(choices=[('passing', 'Passing'), ('failing', 'Failing'), ('pending', 'Pending')], default='pending', max_length=30),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,29 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-10 15:12
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0016_automatedtask_status'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='automatedtask',
|
||||
name='email_sent',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='automatedtask',
|
||||
name='resolved_email_sent',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='automatedtask',
|
||||
name='resolved_text_sent',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='automatedtask',
|
||||
name='text_sent',
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-02-24 05:37
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0017_auto_20210210_1512'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='run_asap_after_missed',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,14 +1,20 @@
|
||||
import pytz
|
||||
import datetime as dt
|
||||
import random
|
||||
import string
|
||||
import datetime as dt
|
||||
|
||||
from django.db import models
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.db.models.fields import DateTimeField
|
||||
from loguru import logger
|
||||
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from logs.models import BaseAuditModel
|
||||
from tacticalrmm.utils import bitdays_to_string
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
RUN_TIME_DAY_CHOICES = [
|
||||
(0, "Monday"),
|
||||
(1, "Tuesday"),
|
||||
@@ -32,6 +38,12 @@ SYNC_STATUS_CHOICES = [
|
||||
("pendingdeletion", "Pending Deletion on Agent"),
|
||||
]
|
||||
|
||||
TASK_STATUS_CHOICES = [
|
||||
("passing", "Passing"),
|
||||
("failing", "Failing"),
|
||||
("pending", "Pending"),
|
||||
]
|
||||
|
||||
|
||||
class AutomatedTask(BaseAuditModel):
|
||||
agent = models.ForeignKey(
|
||||
@@ -83,6 +95,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
)
|
||||
run_time_date = DateTimeField(null=True, blank=True)
|
||||
remove_if_not_scheduled = models.BooleanField(default=False)
|
||||
run_asap_after_missed = models.BooleanField(default=False) # added in agent v1.4.7
|
||||
managed_by_policy = models.BooleanField(default=False)
|
||||
parent_task = models.PositiveIntegerField(null=True, blank=True)
|
||||
win_task_name = models.CharField(max_length=255, null=True, blank=True)
|
||||
@@ -93,9 +106,18 @@ class AutomatedTask(BaseAuditModel):
|
||||
execution_time = models.CharField(max_length=100, default="0.0000")
|
||||
last_run = models.DateTimeField(null=True, blank=True)
|
||||
enabled = models.BooleanField(default=True)
|
||||
status = models.CharField(
|
||||
max_length=30, choices=TASK_STATUS_CHOICES, default="pending"
|
||||
)
|
||||
sync_status = models.CharField(
|
||||
max_length=100, choices=SYNC_STATUS_CHOICES, default="notsynced"
|
||||
)
|
||||
alert_severity = models.CharField(
|
||||
max_length=30, choices=SEVERITY_CHOICES, default="info"
|
||||
)
|
||||
email_alert = models.BooleanField(default=False)
|
||||
text_alert = models.BooleanField(default=False)
|
||||
dashboard_alert = models.BooleanField(default=False)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
@@ -140,22 +162,49 @@ class AutomatedTask(BaseAuditModel):
|
||||
def create_policy_task(self, agent=None, policy=None):
|
||||
from .tasks import create_win_task_schedule
|
||||
|
||||
# if policy is present, then this task is being copied to another policy
|
||||
# if agent is present, then this task is being created on an agent from a policy
|
||||
# exit if neither are set or if both are set
|
||||
if not agent and not policy or agent and policy:
|
||||
return
|
||||
|
||||
assigned_check = None
|
||||
|
||||
# get correct assigned check to task if set
|
||||
if agent and self.assigned_check:
|
||||
assigned_check = agent.agentchecks.get(parent_check=self.assigned_check.pk)
|
||||
# check if there is a matching check on the agent
|
||||
if agent.agentchecks.filter(parent_check=self.assigned_check.pk).exists():
|
||||
assigned_check = agent.agentchecks.filter(
|
||||
parent_check=self.assigned_check.pk
|
||||
).first()
|
||||
# check was overriden by agent and we need to use that agents check
|
||||
else:
|
||||
if agent.agentchecks.filter(
|
||||
check_type=self.assigned_check.check_type, overriden_by_policy=True
|
||||
).exists():
|
||||
assigned_check = agent.agentchecks.filter(
|
||||
check_type=self.assigned_check.check_type,
|
||||
overriden_by_policy=True,
|
||||
).first()
|
||||
elif policy and self.assigned_check:
|
||||
assigned_check = policy.policychecks.get(name=self.assigned_check.name)
|
||||
if policy.policychecks.filter(name=self.assigned_check.name).exists():
|
||||
assigned_check = policy.policychecks.filter(
|
||||
name=self.assigned_check.name
|
||||
).first()
|
||||
else:
|
||||
assigned_check = policy.policychecks.filter(
|
||||
check_type=self.assigned_check.check_type
|
||||
).first()
|
||||
|
||||
task = AutomatedTask.objects.create(
|
||||
agent=agent,
|
||||
policy=policy,
|
||||
managed_by_policy=bool(agent),
|
||||
parent_task=(self.pk if agent else None),
|
||||
alert_severity=self.alert_severity,
|
||||
email_alert=self.email_alert,
|
||||
text_alert=self.text_alert,
|
||||
dashboard_alert=self.dashboard_alert,
|
||||
script=self.script,
|
||||
script_args=self.script_args,
|
||||
assigned_check=assigned_check,
|
||||
@@ -169,6 +218,80 @@ class AutomatedTask(BaseAuditModel):
|
||||
timeout=self.timeout,
|
||||
enabled=self.enabled,
|
||||
remove_if_not_scheduled=self.remove_if_not_scheduled,
|
||||
run_asap_after_missed=self.run_asap_after_missed,
|
||||
)
|
||||
|
||||
create_win_task_schedule.delay(task.pk)
|
||||
|
||||
def should_create_alert(self, alert_template=None):
|
||||
return (
|
||||
self.dashboard_alert
|
||||
or self.email_alert
|
||||
or self.text_alert
|
||||
or (
|
||||
alert_template
|
||||
and (
|
||||
alert_template.task_always_alert
|
||||
or alert_template.task_always_email
|
||||
or alert_template.task_always_text
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
def send_email(self):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
|
||||
if self.agent:
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
|
||||
else:
|
||||
subject = f"{self} Failed"
|
||||
|
||||
body = (
|
||||
subject
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_mail(subject, body, self.agent.alert_template)
|
||||
|
||||
def send_sms(self):
|
||||
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
|
||||
if self.agent:
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
|
||||
else:
|
||||
subject = f"{self} Failed"
|
||||
|
||||
body = (
|
||||
subject
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template)
|
||||
|
||||
def send_resolved_email(self):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
|
||||
body = (
|
||||
subject
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_mail(subject, body, alert_template=self.agent.alert_template)
|
||||
|
||||
def send_resolved_sms(self):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
|
||||
body = (
|
||||
subject
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template)
|
||||
|
||||
@@ -1,12 +1,11 @@
import pytz
from rest_framework import serializers

from .models import AutomatedTask
from agents.models import Agent
from scripts.models import Script

from scripts.serializers import ScriptCheckSerializer
from checks.serializers import CheckSerializer
from scripts.models import Script
from scripts.serializers import ScriptCheckSerializer

from .models import AutomatedTask

class TaskSerializer(serializers.ModelSerializer):
@@ -14,6 +13,24 @@ class TaskSerializer(serializers.ModelSerializer):
assigned_check = CheckSerializer(read_only=True)
schedule = serializers.ReadOnlyField()
last_run = serializers.ReadOnlyField(source="last_run_as_timezone")
alert_template = serializers.SerializerMethodField()

def get_alert_template(self, obj):

if obj.agent:
alert_template = obj.agent.alert_template
else:
alert_template = None

if not alert_template:
return None
else:
return {
"name": alert_template.name,
"always_email": alert_template.task_always_email,
"always_text": alert_template.task_always_text,
"always_alert": alert_template.task_always_alert,
}

class Meta:
model = AutomatedTask
@@ -1,14 +1,19 @@
import asyncio
import datetime as dt
from loguru import logger
from tacticalrmm.celery import app
from django.conf import settings
import random
from time import sleep
from typing import Union

import pytz
from django.conf import settings
from django.utils import timezone as djangotime
from loguru import logger
from packaging import version as pyver

from .models import AutomatedTask
from logs.models import PendingAction
from tacticalrmm.celery import app

from .models import AutomatedTask

logger.configure(**settings.LOG_CONFIG)

@@ -40,7 +45,7 @@ def create_win_task_schedule(pk, pending_action=False):
task.run_time_date = now.astimezone(agent_tz).replace(
tzinfo=pytz.utc
) + djangotime.timedelta(minutes=5)
task.save()
task.save(update_fields=["run_time_date"])

nats_data = {
"func": "schedtask",
@@ -57,9 +62,12 @@ def create_win_task_schedule(pk, pending_action=False):
},
}

if task.remove_if_not_scheduled and pyver.parse(
if task.run_asap_after_missed and pyver.parse(
task.agent.version
) >= pyver.parse("1.1.2"):
) >= pyver.parse("1.4.7"):
nats_data["schedtaskpayload"]["run_asap_after_missed"] = True

if task.remove_if_not_scheduled:
nats_data["schedtaskpayload"]["deleteafter"] = True

elif task.task_type == "checkfailure" or task.task_type == "manual":
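The `run_asap_after_missed` flag is only included for agents new enough to honor it; the gate relies on `packaging.version` comparisons, e.g.:

```python
# Minimal illustration of the version gate used above.
from packaging import version as pyver

agent_version = "1.4.7"  # example value; the real code reads task.agent.version
if pyver.parse(agent_version) >= pyver.parse("1.4.7"):
    schedtaskpayload = {"run_asap_after_missed": True}  # only sent to agents >= 1.4.7
```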
@@ -243,3 +251,85 @@ def remove_orphaned_win_tasks(agentpk):
logger.info(f"Removed orphaned task {task} from {agent.hostname}")

logger.info(f"Orphaned task cleanup finished on {agent.hostname}")

@app.task
def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None) -> str:
from alerts.models import Alert

alert = Alert.objects.get(pk=pk)

# first time sending email
if not alert.email_sent:
sleep(random.randint(1, 10))
alert.assigned_task.send_email()
alert.email_sent = djangotime.now()
alert.save(update_fields=["email_sent"])
else:
if alert_interval:
# send an email only if the last email sent is older than alert interval
delta = djangotime.now() - dt.timedelta(days=alert_interval)
if alert.email_sent < delta:
sleep(random.randint(1, 10))
alert.assigned_task.send_email()
alert.email_sent = djangotime.now()
alert.save(update_fields=["email_sent"])

return "ok"

@app.task
def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) -> str:
from alerts.models import Alert

alert = Alert.objects.get(pk=pk)

# first time sending text
if not alert.sms_sent:
sleep(random.randint(1, 3))
alert.assigned_task.send_sms()
alert.sms_sent = djangotime.now()
alert.save(update_fields=["sms_sent"])
else:
if alert_interval:
# send a text only if the last text sent is older than alert interval
delta = djangotime.now() - dt.timedelta(days=alert_interval)
if alert.sms_sent < delta:
sleep(random.randint(1, 3))
alert.assigned_task.send_sms()
alert.sms_sent = djangotime.now()
alert.save(update_fields=["sms_sent"])

return "ok"

@app.task
def handle_resolved_task_sms_alert(pk: int) -> str:
from alerts.models import Alert

alert = Alert.objects.get(pk=pk)

# first time sending text
if not alert.resolved_sms_sent:
sleep(random.randint(1, 3))
alert.assigned_task.send_resolved_sms()
alert.resolved_sms_sent = djangotime.now()
alert.save(update_fields=["resolved_sms_sent"])

return "ok"

@app.task
def handle_resolved_task_email_alert(pk: int) -> str:
from alerts.models import Alert

alert = Alert.objects.get(pk=pk)

# first time sending email
if not alert.resolved_email_sent:
sleep(random.randint(1, 10))
alert.assigned_task.send_resolved_email()
alert.resolved_email_sent = djangotime.now()
alert.save(update_fields=["resolved_email_sent"])

return "ok"
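These alert tasks are queued by the alert-handling code elsewhere (not part of this diff); a hedged example of how one might be dispatched with a daily repeat interval:

```python
# Usage sketch (assumed call site): send the failure email now, and again at most
# once per day while the alert stays unresolved. Only the task signature comes
# from this diff; the surrounding objects and values are assumptions.
from alerts.models import Alert
from autotasks.tasks import handle_task_email_alert

alert = Alert.objects.get(pk=1)  # placeholder pk
handle_task_email_alert.delay(alert.pk, alert_interval=1)
```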
@@ -1,14 +1,15 @@
import datetime as dt
from unittest.mock import patch, call
from model_bakery import baker
from django.utils import timezone as djangotime
from unittest.mock import call, patch

from django.utils import timezone as djangotime
from model_bakery import baker

from logs.models import PendingAction
from tacticalrmm.test import TacticalTestCase

from .models import AutomatedTask
from logs.models import PendingAction
from .serializers import AutoTaskSerializer
from .tasks import remove_orphaned_win_tasks, run_win_task, create_win_task_schedule
from .tasks import create_win_task_schedule, remove_orphaned_win_tasks, run_win_task

class TestAutotaskViews(TacticalTestCase):
@@ -28,7 +29,6 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
policy = baker.make("automation.Policy")
|
||||
check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
old_agent = baker.make_recipe("agents.agent", version="1.1.0")
|
||||
|
||||
# test script set to invalid pk
|
||||
data = {"autotask": {"script": 500}}
|
||||
@@ -51,15 +51,6 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test old agent version
|
||||
data = {
|
||||
"autotask": {"script": script.id},
|
||||
"agent": old_agent.id,
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
# test add task to agent
|
||||
data = {
|
||||
"autotask": {
|
||||
@@ -150,7 +141,9 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
update_policy_task_fields_task.assert_called_with(policy_task.id, True)
|
||||
update_policy_task_fields_task.assert_called_with(
|
||||
policy_task.id, update_agent=True
|
||||
)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@@ -200,13 +193,6 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
nats_cmd.assert_called_with({"func": "runtask", "taskpk": task.id}, wait=False)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
old_agent = baker.make_recipe("agents.agent", version="1.0.2")
|
||||
task2 = baker.make("autotasks.AutomatedTask", agent=old_agent)
|
||||
url = f"/tasks/runwintask/{task2.id}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
nats_cmd.assert_not_called()
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
from django.urls import path

from . import views

urlpatterns = [
@@ -1,32 +1,28 @@
|
||||
import asyncio
|
||||
import pytz
|
||||
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from .models import AutomatedTask
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
|
||||
from scripts.models import Script
|
||||
from core.models import CoreSettings
|
||||
|
||||
from .serializers import TaskSerializer, AutoTaskSerializer
|
||||
from tacticalrmm.utils import get_bit_days, get_default_timezone, notify_error
|
||||
|
||||
from .models import AutomatedTask
|
||||
from .serializers import AutoTaskSerializer, TaskSerializer
|
||||
from .tasks import (
|
||||
create_win_task_schedule,
|
||||
delete_win_task_schedule,
|
||||
enable_or_disable_win_task,
|
||||
)
|
||||
from tacticalrmm.utils import notify_error, get_bit_days
|
||||
|
||||
|
||||
class AddAutoTask(APIView):
|
||||
def post(self, request):
|
||||
from automation.tasks import generate_agent_tasks_from_policies_task
|
||||
from automation.models import Policy
|
||||
from automation.tasks import generate_agent_tasks_from_policies_task
|
||||
|
||||
data = request.data
|
||||
script = get_object_or_404(Script, pk=data["autotask"]["script"])
|
||||
@@ -38,9 +34,6 @@ class AddAutoTask(APIView):
|
||||
parent = {"policy": policy}
|
||||
else:
|
||||
agent = get_object_or_404(Agent, pk=data["agent"])
|
||||
if not agent.has_gotasks:
|
||||
return notify_error("Requires agent version 1.1.1 or greater")
|
||||
|
||||
parent = {"agent": agent}
|
||||
|
||||
check = None
|
||||
@@ -76,11 +69,25 @@ class AutoTask(APIView):
|
||||
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
ctx = {
|
||||
"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone),
|
||||
"default_tz": get_default_timezone(),
|
||||
"agent_tz": agent.time_zone,
|
||||
}
|
||||
return Response(AutoTaskSerializer(agent, context=ctx).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
from automation.tasks import update_policy_task_fields_task
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
serializer = TaskSerializer(instance=task, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
if task.policy:
|
||||
update_policy_task_fields_task.delay(task.pk)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def patch(self, request, pk):
|
||||
from automation.tasks import update_policy_task_fields_task
|
||||
|
||||
@@ -93,7 +100,7 @@ class AutoTask(APIView):
|
||||
enable_or_disable_win_task.delay(pk=task.pk, action=action)
|
||||
|
||||
else:
|
||||
update_policy_task_fields_task.delay(task.pk, action)
|
||||
update_policy_task_fields_task.delay(task.pk, update_agent=True)
|
||||
|
||||
task.enabled = action
|
||||
task.save(update_fields=["enabled"])
|
||||
@@ -118,8 +125,5 @@ class AutoTask(APIView):
|
||||
@api_view()
|
||||
def run_task(request, pk):
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
if not task.agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
|
||||
return Response(f"{task.name} will now be run on {task.agent.hostname}")
|
||||
|
||||
@@ -1,21 +1,27 @@
from .models import Check
from model_bakery.recipe import Recipe, seq
from model_bakery.recipe import Recipe

check = Recipe(Check)
check = Recipe("checks.Check")

diskspace_check = check.extend(check_type="diskspace", disk="C:", threshold=75)
diskspace_check = check.extend(
check_type="diskspace", disk="C:", warning_threshold=30, error_threshold=10
)

cpuload_check = check.extend(check_type="cpuload", threshold=75)
cpuload_check = check.extend(
check_type="cpuload", warning_threshold=30, error_threshold=75
)

ping_check = check.extend(check_type="ping", ip="10.10.10.10")

memory_check = check.extend(check_type="memory", threshold=75)
memory_check = check.extend(
check_type="memory", warning_threshold=60, error_threshold=75
)

winsvc_check = check.extend(
check_type="winsvc",
svc_name="ServiceName",
svc_display_name="ServiceName",
svc_policy_mode="manual",
pass_if_svc_not_exist=False,
)

eventlog_check = check.extend(
@@ -3,8 +3,8 @@
import django.contrib.postgres.fields
import django.contrib.postgres.fields.jsonb
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models

class Migration(migrations.Migration):

@@ -1,7 +1,7 @@
# Generated by Django 3.1.4 on 2021-01-09 21:36

from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models

class Migration(migrations.Migration):
api/tacticalrmm/checks/migrations/0016_auto_20210123_0149.py (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-23 01:49
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('checks', '0015_auto_20210110_1808'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='check',
|
||||
name='threshold',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='check',
|
||||
name='alert_severity',
|
||||
field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='warning', max_length=15),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='check',
|
||||
name='error_threshold',
|
||||
field=models.PositiveIntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='check',
|
||||
name='info_return_codes',
|
||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveIntegerField(), blank=True, default=list, null=True, size=None),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='check',
|
||||
name='warning_return_codes',
|
||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveIntegerField(), blank=True, default=list, null=True, size=None),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='check',
|
||||
name='warning_threshold',
|
||||
field=models.PositiveIntegerField(blank=True, default=0, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-29 21:11
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('checks', '0016_auto_20210123_0149'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='check',
|
||||
name='dashboard_alert',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/checks/migrations/0018_auto_20210205_1647.py (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-05 16:47
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('checks', '0017_check_dashboard_alert'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='check',
|
||||
name='alert_severity',
|
||||
field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='warning', max_length=15, null=True),
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/checks/migrations/0019_auto_20210205_1728.py (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-05 17:28
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('checks', '0018_auto_20210205_1647'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='check',
|
||||
name='resolved_email_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='check',
|
||||
name='resolved_text_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/checks/migrations/0020_auto_20210210_1512.py (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-10 15:12
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('checks', '0019_auto_20210205_1728'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='check',
|
||||
name='email_sent',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='check',
|
||||
name='resolved_email_sent',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='check',
|
||||
name='resolved_text_sent',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='check',
|
||||
name='text_sent',
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/checks/migrations/0021_auto_20210212_1429.py (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-12 14:29
|
||||
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('checks', '0020_auto_20210210_1512'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='check',
|
||||
name='error_threshold',
|
||||
field=models.PositiveIntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(99)]),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='check',
|
||||
name='warning_threshold',
|
||||
field=models.PositiveIntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(99)]),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-06 02:18
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('checks', '0021_auto_20210212_1429'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='check',
|
||||
name='number_of_events_b4_alert',
|
||||
field=models.PositiveIntegerField(blank=True, default=1, null=True),
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/checks/migrations/0023_check_run_interval.py (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-06 02:59
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('checks', '0022_check_number_of_events_b4_alert'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='check',
|
||||
name='run_interval',
|
||||
field=models.PositiveIntegerField(blank=True, default=0),
|
||||
),
|
||||
]
|
||||
@@ -1,21 +1,25 @@
|
||||
import asyncio
|
||||
import string
|
||||
import os
|
||||
import json
|
||||
import pytz
|
||||
from statistics import mean, mode
|
||||
import os
|
||||
import string
|
||||
from statistics import mean
|
||||
from typing import Any
|
||||
|
||||
from django.db import models
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.validators import MinValueValidator, MaxValueValidator
|
||||
from rest_framework.fields import JSONField
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
from django.db import models
|
||||
from loguru import logger
|
||||
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from core.models import CoreSettings
|
||||
from logs.models import BaseAuditModel
|
||||
from .tasks import handle_check_email_alert_task, handle_check_sms_alert_task
|
||||
|
||||
from .utils import bytes2human
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
CHECK_TYPE_CHOICES = [
|
||||
("diskspace", "Disk Space Check"),
|
||||
("ping", "Ping Check"),
|
||||
@@ -84,18 +88,35 @@ class Check(BaseAuditModel):
last_run = models.DateTimeField(null=True, blank=True)
email_alert = models.BooleanField(default=False)
text_alert = models.BooleanField(default=False)
dashboard_alert = models.BooleanField(default=False)
fails_b4_alert = models.PositiveIntegerField(default=1)
fail_count = models.PositiveIntegerField(default=0)
email_sent = models.DateTimeField(null=True, blank=True)
text_sent = models.DateTimeField(null=True, blank=True)
outage_history = models.JSONField(null=True, blank=True) # store
extra_details = models.JSONField(null=True, blank=True)

run_interval = models.PositiveIntegerField(blank=True, default=0)
# check specific fields

# for eventlog, script, ip, and service alert severity
alert_severity = models.CharField(
max_length=15,
choices=SEVERITY_CHOICES,
default="warning",
null=True,
blank=True,
)

# threshold percent for diskspace, cpuload or memory check
threshold = models.PositiveIntegerField(
null=True, blank=True, validators=[MinValueValidator(1), MaxValueValidator(99)]
error_threshold = models.PositiveIntegerField(
validators=[MinValueValidator(0), MaxValueValidator(99)],
null=True,
blank=True,
default=0,
)
warning_threshold = models.PositiveIntegerField(
null=True,
blank=True,
validators=[MinValueValidator(0), MaxValueValidator(99)],
default=0,
)
# diskcheck i.e C:, D: etc
disk = models.CharField(max_length=2, null=True, blank=True)
@@ -115,6 +136,18 @@ class Check(BaseAuditModel):
blank=True,
default=list,
)
info_return_codes = ArrayField(
models.PositiveIntegerField(),
null=True,
blank=True,
default=list,
)
warning_return_codes = ArrayField(
models.PositiveIntegerField(),
null=True,
blank=True,
default=list,
)
timeout = models.PositiveIntegerField(null=True, blank=True)
stdout = models.TextField(null=True, blank=True)
stderr = models.TextField(null=True, blank=True)
@@ -149,6 +182,9 @@ class Check(BaseAuditModel):
max_length=255, choices=EVT_LOG_FAIL_WHEN_CHOICES, null=True, blank=True
)
search_last_days = models.PositiveIntegerField(null=True, blank=True)
number_of_events_b4_alert = models.PositiveIntegerField(
null=True, blank=True, default=1
)

def __str__(self):
if self.agent:
@@ -159,17 +195,31 @@ class Check(BaseAuditModel):
@property
def readable_desc(self):
if self.check_type == "diskspace":
return f"{self.get_check_type_display()}: Drive {self.disk} < {self.threshold}%"

text = ""
if self.warning_threshold:
text += f" Warning Threshold: {self.warning_threshold}%"
if self.error_threshold:
text += f" Error Threshold: {self.error_threshold}%"

return f"{self.get_check_type_display()}: Drive {self.disk} - {text}" # type: ignore
elif self.check_type == "ping":
return f"{self.get_check_type_display()}: {self.name}"
return f"{self.get_check_type_display()}: {self.name}" # type: ignore
elif self.check_type == "cpuload" or self.check_type == "memory":
return f"{self.get_check_type_display()} > {self.threshold}%"

text = ""
if self.warning_threshold:
text += f" Warning Threshold: {self.warning_threshold}%"
if self.error_threshold:
text += f" Error Threshold: {self.error_threshold}%"

return f"{self.get_check_type_display()} - {text}" # type: ignore
elif self.check_type == "winsvc":
return f"{self.get_check_type_display()}: {self.svc_display_name}"
return f"{self.get_check_type_display()}: {self.svc_display_name}" # type: ignore
elif self.check_type == "eventlog":
return f"{self.get_check_type_display()}: {self.name}"
return f"{self.get_check_type_display()}: {self.name}" # type: ignore
elif self.check_type == "script":
return f"{self.get_check_type_display()}: {self.script.name}"
return f"{self.get_check_type_display()}: {self.script.name}" # type: ignore
else:
return "n/a"

@@ -188,15 +238,13 @@ class Check(BaseAuditModel):
return self.last_run

@property
def non_editable_fields(self):
def non_editable_fields(self) -> list[str]:
return [
"check_type",
"status",
"more_info",
"last_run",
"fail_count",
"email_sent",
"text_sent",
"outage_history",
"extra_details",
"stdout",
@@ -215,10 +263,28 @@ class Check(BaseAuditModel):
"modified_time",
]

def add_check_history(self, value, more_info=None):
def should_create_alert(self, alert_template=None):

return (
self.dashboard_alert
or self.email_alert
or self.text_alert
or (
alert_template
and (
alert_template.check_always_alert
or alert_template.check_always_email
or alert_template.check_always_text
)
)
)

def add_check_history(self, value: int, more_info: Any = None) -> None:
CheckHistory.objects.create(check_history=self, y=value, results=more_info)

def handle_checkv2(self, data):
from alerts.models import Alert

# cpuload or mem checks
if self.check_type == "cpuload" or self.check_type == "memory":

@@ -231,8 +297,12 @@

avg = int(mean(self.history))

if avg > self.threshold:
if self.error_threshold and avg > self.error_threshold:
self.status = "failing"
self.alert_severity = "error"
elif self.warning_threshold and avg > self.warning_threshold:
self.status = "failing"
self.alert_severity = "warning"
else:
self.status = "passing"
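The hunk above replaces the single-threshold comparison with a two-tier scheme: the error threshold is evaluated first, then the warning threshold, and a threshold of 0/None skips that tier. A standalone sketch of the same decision logic follows; the function name and return shape are illustrative, not part of the codebase.

from statistics import mean
from typing import Optional, Tuple

def classify_utilization(history: list, warning_threshold: int, error_threshold: int) -> Tuple[str, Optional[str]]:
    # Mirrors the cpuload/memory branch above: the error threshold wins, then the warning
    # threshold, and a threshold of 0/None disables that tier.
    avg = int(mean(history))
    if error_threshold and avg > error_threshold:
        return "failing", "error"
    if warning_threshold and avg > warning_threshold:
        return "failing", "warning"
    return "passing", None

# classify_utilization([70, 80, 90], warning_threshold=75, error_threshold=85) -> ("failing", "warning")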
@@ -246,17 +316,26 @@
total = bytes2human(data["total"])
free = bytes2human(data["free"])

if (100 - percent_used) < self.threshold:
if self.error_threshold and (100 - percent_used) < self.error_threshold:
self.status = "failing"
self.alert_severity = "error"
elif (
self.warning_threshold
and (100 - percent_used) < self.warning_threshold
):
self.status = "failing"
self.alert_severity = "warning"

else:
self.status = "passing"

self.more_info = f"Total: {total}B, Free: {free}B"

# add check history
self.add_check_history(percent_used)
self.add_check_history(100 - percent_used)
else:
self.status = "failing"
self.alert_severity = "error"
self.more_info = f"Disk {self.disk} does not exist"

self.save(update_fields=["more_info"])
@@ -273,8 +352,15 @@
# golang agent
self.execution_time = "{:.4f}".format(data["runtime"])

if data["retcode"] != 0:
if data["retcode"] in self.info_return_codes:
self.alert_severity = "info"
self.status = "failing"
elif data["retcode"] in self.warning_return_codes:
self.alert_severity = "warning"
self.status = "failing"
elif data["retcode"] != 0:
self.status = "failing"
self.alert_severity = "error"
else:
self.status = "passing"
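The script-check hunk above routes non-zero return codes through the new info_return_codes and warning_return_codes arrays before falling back to an error. A standalone sketch of that precedence follows; the helper name and example values are illustrative only.

def classify_retcode(retcode: int, info_codes: list, warning_codes: list) -> tuple:
    # Mirrors the precedence in handle_checkv2 for script checks: info and warning codes
    # still mark the check as failing, but with a softer severity than the error fallback.
    if retcode in info_codes:
        return "failing", "info"
    if retcode in warning_codes:
        return "failing", "warning"
    if retcode != 0:
        return "failing", "error"
    return "passing", None

# classify_retcode(2, info_codes=[3], warning_codes=[2]) -> ("failing", "warning")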
@@ -406,13 +492,13 @@
log.append(i)

if self.fail_when == "contains":
if log:
if log and len(log) >= self.number_of_events_b4_alert:
self.status = "failing"
else:
self.status = "passing"

elif self.fail_when == "not_contains":
if log:
if log and len(log) >= self.number_of_events_b4_alert:
self.status = "passing"
else:
self.status = "failing"
@@ -428,59 +514,19 @@
# handle status
if self.status == "failing":
self.fail_count += 1
self.save(update_fields=["status", "fail_count"])

elif self.status == "passing":
if self.fail_count != 0:
self.fail_count = 0
self.save(update_fields=["status", "fail_count"])
else:
self.save(update_fields=["status"])
self.save(update_fields=["status", "fail_count", "alert_severity"])

if self.fail_count >= self.fails_b4_alert:
if self.email_alert:
handle_check_email_alert_task.delay(self.pk)
if self.text_alert:
handle_check_sms_alert_task.delay(self.pk)
Alert.handle_alert_failure(self)

elif self.status == "passing":
self.fail_count = 0
self.save(update_fields=["status", "fail_count", "alert_severity"])
if Alert.objects.filter(assigned_check=self, resolved=False).exists():
Alert.handle_alert_resolve(self)

return self.status

def handle_check(self, data):
if self.check_type != "cpuload" and self.check_type != "memory":

if data["status"] == "passing" and self.fail_count != 0:
self.fail_count = 0
self.save(update_fields=["fail_count"])

elif data["status"] == "failing":
self.fail_count += 1
self.save(update_fields=["fail_count"])

else:
self.history.append(data["percent"])

if len(self.history) > 15:
self.history = self.history[-15:]

self.save(update_fields=["history"])

avg = int(mean(self.history))

if avg > self.threshold:
self.status = "failing"
self.fail_count += 1
self.save(update_fields=["status", "fail_count"])
else:
self.status = "passing"
if self.fail_count != 0:
self.fail_count = 0
self.save(update_fields=["status", "fail_count"])
else:
self.save(update_fields=["status"])

if self.email_alert and self.fail_count >= self.fails_b4_alert:
handle_check_email_alert_task.delay(self.pk)

@staticmethod
def serialize(check):
# serializes the check and returns json
@@ -514,17 +560,23 @@
managed_by_policy=bool(agent),
parent_check=(self.pk if agent else None),
name=self.name,
alert_severity=self.alert_severity,
check_type=self.check_type,
email_alert=self.email_alert,
dashboard_alert=self.dashboard_alert,
text_alert=self.text_alert,
fails_b4_alert=self.fails_b4_alert,
extra_details=self.extra_details,
threshold=self.threshold,
run_interval=self.run_interval,
error_threshold=self.error_threshold,
warning_threshold=self.warning_threshold,
disk=self.disk,
ip=self.ip,
script=self.script,
script_args=self.script_args,
timeout=self.timeout,
info_return_codes=self.info_return_codes,
warning_return_codes=self.warning_return_codes,
svc_name=self.svc_name,
svc_display_name=self.svc_display_name,
pass_if_start_pending=self.pass_if_start_pending,
@@ -539,6 +591,7 @@
event_message=self.event_message,
fail_when=self.fail_when,
search_last_days=self.search_last_days,
number_of_events_b4_alert=self.number_of_events_b4_alert,
)

def is_duplicate(self, check):
@@ -567,18 +620,25 @@

CORE = CoreSettings.objects.first()

body: str = ""
if self.agent:
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - {self} Failed"
else:
subject = f"{self} Failed"

if self.check_type == "diskspace":
text = ""
if self.warning_threshold:
text += f" Warning Threshold: {self.warning_threshold}%"
if self.error_threshold:
text += f" Error Threshold: {self.error_threshold}%"

percent_used = [
d["percent"] for d in self.agent.disks if d["device"] == self.disk
][0]
percent_free = 100 - percent_used

body = subject + f" - Free: {percent_free}%, Threshold: {self.threshold}%"
body = subject + f" - Free: {percent_free}%, {text}"

elif self.check_type == "script":

@@ -592,26 +652,29 @@
body = self.more_info

elif self.check_type == "cpuload" or self.check_type == "memory":
text = ""
if self.warning_threshold:
text += f" Warning Threshold: {self.warning_threshold}%"
if self.error_threshold:
text += f" Error Threshold: {self.error_threshold}%"

avg = int(mean(self.history))

if self.check_type == "cpuload":
body = (
subject
+ f" - Average CPU utilization: {avg}%, Threshold: {self.threshold}%"
)
body = subject + f" - Average CPU utilization: {avg}%, {text}"

elif self.check_type == "memory":
body = (
subject
+ f" - Average memory usage: {avg}%, Threshold: {self.threshold}%"
)
body = subject + f" - Average memory usage: {avg}%, {text}"

elif self.check_type == "winsvc":

try:
status = list(
filter(lambda x: x["name"] == self.svc_name, self.agent.services)
)[0]["status"]
# catch services that don't exist if policy check
except:
status = "Unknown"

body = subject + f" - Status: {status.upper()}"

@@ -637,11 +700,12 @@
except:
continue

CORE.send_mail(subject, body)
CORE.send_mail(subject, body, alert_template=self.agent.alert_template)

def send_sms(self):

CORE = CoreSettings.objects.first()
body: str = ""

if self.agent:
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
@@ -649,27 +713,33 @@
subject = f"{self} Failed"

if self.check_type == "diskspace":
text = ""
if self.warning_threshold:
text += f" Warning Threshold: {self.warning_threshold}%"
if self.error_threshold:
text += f" Error Threshold: {self.error_threshold}%"

percent_used = [
d["percent"] for d in self.agent.disks if d["device"] == self.disk
][0]
percent_free = 100 - percent_used
body = subject + f" - Free: {percent_free}%, Threshold: {self.threshold}%"
body = subject + f" - Free: {percent_free}%, {text}"
elif self.check_type == "script":
body = subject + f" - Return code: {self.retcode}"
elif self.check_type == "ping":
body = subject
elif self.check_type == "cpuload" or self.check_type == "memory":
text = ""
if self.warning_threshold:
text += f" Warning Threshold: {self.warning_threshold}%"
if self.error_threshold:
text += f" Error Threshold: {self.error_threshold}%"

avg = int(mean(self.history))
if self.check_type == "cpuload":
body = (
subject
+ f" - Average CPU utilization: {avg}%, Threshold: {self.threshold}%"
)
body = subject + f" - Average CPU utilization: {avg}%, {text}"
elif self.check_type == "memory":
body = (
subject
+ f" - Average memory usage: {avg}%, Threshold: {self.threshold}%"
)
body = subject + f" - Average memory usage: {avg}%, {text}"
elif self.check_type == "winsvc":
status = list(
filter(lambda x: x["name"] == self.svc_name, self.agent.services)
@@ -678,7 +748,21 @@
elif self.check_type == "eventlog":
body = subject

CORE.send_sms(body)
CORE.send_sms(body, alert_template=self.agent.alert_template)

def send_resolved_email(self):
CORE = CoreSettings.objects.first()

subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
body = f"{self} is now back to normal"

CORE.send_mail(subject, body, alert_template=self.agent.alert_template)

def send_resolved_sms(self):
CORE = CoreSettings.objects.first()

subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
CORE.send_sms(subject, alert_template=self.agent.alert_template)


class CheckHistory(models.Model):
@@ -1,10 +1,11 @@
import validators as _v
import pytz
import validators as _v
from rest_framework import serializers

from .models import Check, CheckHistory
from autotasks.models import AutomatedTask
from scripts.serializers import ScriptSerializer, ScriptCheckSerializer
from scripts.serializers import ScriptCheckSerializer, ScriptSerializer

from .models import Check, CheckHistory


class AssignedTaskField(serializers.ModelSerializer):
@@ -20,6 +21,23 @@ class CheckSerializer(serializers.ModelSerializer):
assigned_task = serializers.SerializerMethodField()
last_run = serializers.ReadOnlyField(source="last_run_as_timezone")
history_info = serializers.ReadOnlyField()
alert_template = serializers.SerializerMethodField()

def get_alert_template(self, obj):
if obj.agent:
alert_template = obj.agent.alert_template
else:
alert_template = None

if not alert_template:
return None
else:
return {
"name": alert_template.name,
"always_email": alert_template.check_always_email,
"always_text": alert_template.check_always_text,
"always_alert": alert_template.check_always_alert,
}

## Change to return only array of tasks after 9/25/2020
def get_assigned_task(self, obj):
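The new get_alert_template method above surfaces a small summary of the agent's alert template to the frontend. An illustrative example of the serialized shape follows; the values are made up, and the field is None when the check has no agent or the agent has no template.

# Example payload for a check whose agent has an alert template assigned (values invented):
{
    "name": "Default Alert Template",
    "always_email": True,
    "always_text": False,
    "always_alert": True,
}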
@@ -40,9 +58,11 @@ class CheckSerializer(serializers.ModelSerializer):
check_type = val["check_type"]
except KeyError:
return val

# disk checks
# make sure no duplicate diskchecks exist for an agent/policy
if check_type == "diskspace" and not self.instance: # only on create
if check_type == "diskspace":
if not self.instance: # only on create
checks = (
Check.objects.filter(**self.context)
.filter(check_type="diskspace")
@@ -54,6 +74,20 @@ class CheckSerializer(serializers.ModelSerializer):
f"A disk check for Drive {val['disk']} already exists!"
)

if not val["warning_threshold"] and not val["error_threshold"]:
raise serializers.ValidationError(
f"Warning threshold or Error Threshold must be set"
)

if (
val["warning_threshold"] < val["error_threshold"]
and val["warning_threshold"] > 0
and val["error_threshold"] > 0
):
raise serializers.ValidationError(
f"Warning threshold must be greater than Error Threshold"
)

# ping checks
if check_type == "ping":
if (
@@ -75,6 +109,20 @@ class CheckSerializer(serializers.ModelSerializer):
"A cpuload check for this agent already exists"
)

if not val["warning_threshold"] and not val["error_threshold"]:
raise serializers.ValidationError(
f"Warning threshold or Error Threshold must be set"
)

if (
val["warning_threshold"] > val["error_threshold"]
and val["warning_threshold"] > 0
and val["error_threshold"] > 0
):
raise serializers.ValidationError(
f"Warning threshold must be less than Error Threshold"
)

if check_type == "memory" and not self.instance:
if (
Check.objects.filter(**self.context, check_type="memory")
@@ -85,6 +133,20 @@ class CheckSerializer(serializers.ModelSerializer):
"A memory check for this agent already exists"
)

if not val["warning_threshold"] and not val["error_threshold"]:
raise serializers.ValidationError(
f"Warning threshold or Error Threshold must be set"
)

if (
val["warning_threshold"] > val["error_threshold"]
and val["warning_threshold"] > 0
and val["error_threshold"] > 0
):
raise serializers.ValidationError(
f"Warning threshold must be less than Error Threshold"
)

return val
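Note the asymmetry in the validation added above: for diskspace checks the warning threshold must not be lower than the error threshold (alerts fire as free space drops), while for cpuload and memory checks it must not be higher (alerts fire as usage rises), and in every case at least one of the two thresholds has to be set. A condensed restatement of those rules as an illustrative helper; this function and its example values are not project code.

def thresholds_valid(check_type: str, warning: int, error: int) -> bool:
    # Condensed restatement of the serializer rules above; illustrative only.
    if not warning and not error:
        return False
    if warning and error:
        if check_type == "diskspace":
            return warning >= error   # free-space percentage falls toward the error threshold
        if check_type in ("cpuload", "memory"):
            return warning <= error   # usage percentage rises toward the error threshold
    return True

# thresholds_valid("diskspace", warning=25, error=10) -> True
# thresholds_valid("memory", warning=95, error=90)   -> False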
@@ -95,101 +157,7 @@ class AssignedTaskCheckRunnerField(serializers.ModelSerializer):


class CheckRunnerGetSerializer(serializers.ModelSerializer):
# for the windows agent
# only send data needed for agent to run a check

assigned_task = serializers.SerializerMethodField()
script = ScriptSerializer(read_only=True)

def get_assigned_task(self, obj):
if obj.assignedtask.exists():
# this will not break agents on version 0.10.2 or lower
# newer agents once released will properly handle multiple tasks assigned to a check
task = obj.assignedtask.first()
return AssignedTaskCheckRunnerField(task).data

class Meta:
model = Check
exclude = [
"policy",
"managed_by_policy",
"overriden_by_policy",
"parent_check",
"name",
"more_info",
"last_run",
"email_alert",
"text_alert",
"fails_b4_alert",
"fail_count",
"email_sent",
"text_sent",
"outage_history",
"extra_details",
"stdout",
"stderr",
"retcode",
"execution_time",
"svc_display_name",
"svc_policy_mode",
"created_by",
"created_time",
"modified_by",
"modified_time",
"history",
]


class CheckRunnerGetSerializerV2(serializers.ModelSerializer):
# for the windows __python__ agent
# only send data needed for agent to run a check

assigned_tasks = serializers.SerializerMethodField()
script = ScriptSerializer(read_only=True)

def get_assigned_tasks(self, obj):
if obj.assignedtask.exists():
tasks = obj.assignedtask.all()
return AssignedTaskCheckRunnerField(tasks, many=True).data

class Meta:
model = Check
exclude = [
"policy",
"managed_by_policy",
"overriden_by_policy",
"parent_check",
"name",
"more_info",
"last_run",
"email_alert",
"text_alert",
"fails_b4_alert",
"fail_count",
"email_sent",
"text_sent",
"outage_history",
"extra_details",
"stdout",
"stderr",
"retcode",
"execution_time",
"svc_display_name",
"svc_policy_mode",
"created_by",
"created_time",
"modified_by",
"modified_time",
"history",
]


class CheckRunnerGetSerializerV3(serializers.ModelSerializer):
# for the windows __golang__ agent
# only send data needed for agent to run a check
# the difference here is in the script serializer
# script checks no longer rely on salt and are executed directly by the go agent

assigned_tasks = serializers.SerializerMethodField()
script = ScriptCheckSerializer(read_only=True)

@@ -212,8 +180,6 @@ class CheckRunnerGetSerializerV3(serializers.ModelSerializer):
"text_alert",
"fails_b4_alert",
"fail_count",
"email_sent",
"text_sent",
"outage_history",
"extra_details",
"stdout",
@@ -1,57 +1,91 @@
import datetime as dt
import random
from time import sleep
from typing import Union

from django.utils import timezone as djangotime

from tacticalrmm.celery import app
from django.utils import timezone as djangotime


@app.task
def handle_check_email_alert_task(pk):
from .models import Check
def handle_check_email_alert_task(pk, alert_interval: Union[float, None] = None) -> str:
from alerts.models import Alert

check = Check.objects.get(pk=pk)
alert = Alert.objects.get(pk=pk)

if not check.agent.maintenance_mode:
# first time sending email
if not check.email_sent:
if not alert.email_sent:
sleep(random.randint(1, 10))
check.send_email()
check.email_sent = djangotime.now()
check.save(update_fields=["email_sent"])
alert.assigned_check.send_email()
alert.email_sent = djangotime.now()
alert.save(update_fields=["email_sent"])
else:
# send an email only if the last email sent is older than 24 hours
delta = djangotime.now() - dt.timedelta(hours=24)
if check.email_sent < delta:
if alert_interval:
# send an email only if the last email sent is older than alert interval
delta = djangotime.now() - dt.timedelta(days=alert_interval)
if alert.email_sent < delta:
sleep(random.randint(1, 10))
check.send_email()
check.email_sent = djangotime.now()
check.save(update_fields=["email_sent"])
alert.assigned_check.send_email()
alert.email_sent = djangotime.now()
alert.save(update_fields=["email_sent"])

return "ok"


@app.task
def handle_check_sms_alert_task(pk):
from .models import Check
def handle_check_sms_alert_task(pk, alert_interval: Union[float, None] = None) -> str:
from alerts.models import Alert

check = Check.objects.get(pk=pk)
alert = Alert.objects.get(pk=pk)

if not check.agent.maintenance_mode:
# first time sending text
if not check.text_sent:
if not alert.sms_sent:
sleep(random.randint(1, 3))
check.send_sms()
check.text_sent = djangotime.now()
check.save(update_fields=["text_sent"])
alert.assigned_check.send_sms()
alert.sms_sent = djangotime.now()
alert.save(update_fields=["sms_sent"])
else:
if alert_interval:
# send a text only if the last text sent is older than 24 hours
delta = djangotime.now() - dt.timedelta(hours=24)
if check.text_sent < delta:
delta = djangotime.now() - dt.timedelta(days=alert_interval)
if alert.sms_sent < delta:
sleep(random.randint(1, 3))
check.send_sms()
check.text_sent = djangotime.now()
check.save(update_fields=["text_sent"])
alert.assigned_check.send_sms()
alert.sms_sent = djangotime.now()
alert.save(update_fields=["sms_sent"])

return "ok"


@app.task
def handle_resolved_check_sms_alert_task(pk: int) -> str:
from alerts.models import Alert

alert = Alert.objects.get(pk=pk)

# first time sending text
if not alert.resolved_sms_sent:
sleep(random.randint(1, 3))
alert.assigned_check.send_resolved_sms()
alert.resolved_sms_sent = djangotime.now()
alert.save(update_fields=["resolved_sms_sent"])

return "ok"


@app.task
def handle_resolved_check_email_alert_task(pk: int) -> str:
from alerts.models import Alert

alert = Alert.objects.get(pk=pk)

# first time sending email
if not alert.resolved_email_sent:
sleep(random.randint(1, 10))
alert.assigned_check.send_resolved_email()
alert.resolved_email_sent = djangotime.now()
alert.save(update_fields=["resolved_email_sent"])

return "ok"
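With the refactor above, the periodic alert tasks key off an Alert row instead of the Check itself, and re-sends are governed by an optional alert_interval (in days) rather than a hard-coded 24 hours. A hedged sketch of how a caller might queue them; the wrapper function, the alert object, and the one-day interval are assumptions for illustration, and in the real code the interval would presumably come from the agent's alert template.

# Illustrative call sites only; handle_check_email_alert_task / handle_check_sms_alert_task
# are the Celery tasks defined above, everything else here is assumed for the example.
from checks.tasks import handle_check_email_alert_task, handle_check_sms_alert_task

def queue_check_alerts(alert, alert_interval: float = 1.0) -> None:
    # pk is now the Alert primary key, not the Check pk
    handle_check_email_alert_task.delay(alert.pk, alert_interval=alert_interval)
    handle_check_sms_alert_task.delay(alert.pk, alert_interval=alert_interval)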
File diff suppressed because it is too large.
Some files were not shown because too many files have changed in this diff.