Compare commits
212 Commits

SHA1:
e37f6cfda7
989c804409
7345bc3c82
69bee35700
598e24df7c
0ae669201e
f52a8a4642
9c40b61ef2
72dabcda83
161a06dbcc
8ed3d4e70c
a4223ccc8a
ca85923855
52bfe7c493
4786bd0cbe
cadab160ff
6a7f17b2b0
4986a4d775
903af0c2cf
3282fa803c
67cc47608d
0411704b8b
1de85b2c69
33b012f29d
1357584df3
e15809e271
0da1950427
e590b921be
09462692f5
c1d1b5f762
6b9c87b858
485b6eb904
057630bdb5
6b02873b30
0fa0fc6d6b
339ec07465
cd2e798fea
d5cadbeae2
8046a3ccae
bf91d60b31
539c047ec8
290c18fa87
98c46f5e57
f8bd5b5b4e
816d32edad
8453835c05
9328c356c8
89e3c1fc94
67e54cd15d
278ea24786
aba1662631
61eeb60c19
5e9a8f4806
4cb274e9bc
8b9b1a6a35
2655964113
188bad061b
3af4c329aa
6c13395f7d
77b32ba360
91dba291ac
a6bc293640
53882d6e5f
d68adfbf10
498a392d7f
740f6c05db
d810ce301f
5ef6a14d24
a13f6f1e68
d2d0f1aaee
e64c72cc89
9ab915a08b
e26fbf0328
d9a52c4a2a
7b2ec90de9
d310bf8bbf
2abc6cc939
56d4e694a2
5f002c9cdc
759daf4b4a
3a8d9568e3
ff22a9d94a
a6e42d5374
a2f74e0488
ee44240569
d0828744a2
6e2e576b29
bf61e27f8a
c441c30b46
0e741230ea
1bfe9ac2db
6812e72348
b6449d2f5b
7e3ea20dce
c9d6fe9dcd
4a649a6b8b
8fef184963
69583ca3c0
6038a68e91
fa8bd8db87
18b4f0ed0f
461f9d66c9
2155103c7a
c9a6839c45
9fbe331a80
a56389c4ce
64656784cb
6eff2c181e
1aa48c6d62
c7ca1a346d
fa0ec7b502
768438c136
9badea0b3c
43263a1650
821e02dc75
ed011ecf28
d861de4c2f
3a3b2449dc
d2614406ca
0798d098ae
dab7ddc2bb
081a96e281
a7dd881d79
8134d5e24d
ba6756cd45
5d8fce21ac
e7e4a5bcd4
55f33357ea
90568bba31
5d6e2dc2e4
6bb33f2559
ced92554ed
dff3383158
bf03c89cb2
9f1484bbef
3899680e26
6bb2eb25a1
f8dfd8edb3
042be624a3
6bafa4c79a
58b42fac5c
3b47b9558a
ccf9636296
96942719f2
69cf1c1adc
d77cba40b8
968735b555
ceed9d29eb
41329039ee
f68b102ca8
fa36e54298
b689f57435
885fa0ff56
303acb72a3
b2a46cd0cd
5a5ecb3ee3
60b4ab6a63
e4b096a08f
343f55049b
6b46025261
5ea503f23e
ce95f9ac23
c3fb87501b
dc6a343612
3a61053957
570129e4d4
3315c7045f
5ae50e242c
bbcf449719
aab10f7184
8d43488cb8
0a9c647e19
40db5d4aa8
9254532baa
7abed47cf0
5c6ac758f7
007677962c
9c4aeab64a
48e6fc0efe
c8be713d11
ae887c8648
5daac2531b
68def00327
67e7976710
35747e937e
fb439787a4
8fa368f473
c84a9d07b1
7fb46cdfc4
52985e5ddc
e880935dc3
cc22b1bca5
49a5128918
fedc7dcb44
cd32b20215
15cd9832c4
f25d4e4553
12d1c82b63
aebe855078
3416a71ebd
94b3fea528
ad1a9ecca1
715accfb8a
a8e03c6138
f69446b648
eedfbe5846
153351cc9f
1b1eec40a7
763877541a
1fad7d72a2
51ea2ea879
d77a478bf0
.devcontainer/.env.example  (new file, 28 additions)
@@ -0,0 +1,28 @@
+COMPOSE_PROJECT_NAME=trmm
+
+IMAGE_REPO=tacticalrmm/
+VERSION=latest
+
+# tactical credentials (Used to login to dashboard)
+TRMM_USER=tactical
+TRMM_PASS=tactical
+
+# dns settings
+APP_HOST=rmm.example.com
+API_HOST=api.example.com
+MESH_HOST=mesh.example.com
+
+# mesh settings
+MESH_USER=tactical
+MESH_PASS=tactical
+MONGODB_USER=mongouser
+MONGODB_PASSWORD=mongopass
+
+# database settings
+POSTGRES_USER=postgres
+POSTGRES_PASS=postgrespass
+
+# DEV SETTINGS
+APP_PORT=8000
+API_PORT=8080
+HTTP_PROTOCOL=https
.devcontainer/api.dockerfile  (new file, 28 additions)
@@ -0,0 +1,28 @@
+FROM python:3.8-slim
+
+ENV TACTICAL_DIR /opt/tactical
+ENV TACTICAL_GO_DIR /usr/local/rmmgo
+ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
+ENV WORKSPACE_DIR /workspace
+ENV TACTICAL_USER tactical
+ENV VIRTUAL_ENV ${WORKSPACE_DIR}/api/tacticalrmm/env
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+
+EXPOSE 8000
+
+RUN groupadd -g 1000 tactical && \
+    useradd -u 1000 -g 1000 tactical
+
+# Copy Go Files
+COPY --from=golang:1.15 /usr/local/go ${TACTICAL_GO_DIR}/go
+
+# Copy Dev python reqs
+COPY ./requirements.txt /
+
+# Copy Docker Entrypoint
+COPY ./entrypoint.sh /
+RUN chmod +x /entrypoint.sh
+ENTRYPOINT ["/entrypoint.sh"]
+
+WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm
.devcontainer/docker-compose.debug.yml  (new file, 19 additions)
@@ -0,0 +1,19 @@
+version: '3.4'
+
+services:
+  api-dev:
+    image: api-dev
+    build:
+      context: .
+      dockerfile: ./api.dockerfile
+    command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000 --nothreading --noreload"]
+    ports:
+      - 8000:8000
+      - 5678:5678
+    volumes:
+      - tactical-data-dev:/opt/tactical
+      - ..:/workspace:cached
+    networks:
+      dev:
+        aliases:
+          - tactical-backend
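
For reference, the "docker debug" task added later in this diff (.vscode/tasks.json) brings this debug override up with essentially the following command; the flags below are taken from that task and assume it runs from the repository root:

```bash
# Build and start the dev stack with the debugpy override layered on top
docker-compose -p trmm \
  -f .devcontainer/docker-compose.yml \
  -f .devcontainer/docker-compose.debug.yml \
  up -d --build
```

With the override applied, debugpy listens on port 5678 and waits for the "Django: Docker Remote Attach" configuration (added to .vscode/launch.json below) to connect before the Django dev server starts.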
.devcontainer/docker-compose.yml  (new file, 209 additions)
@@ -0,0 +1,209 @@
+version: '3.4'
+
+services:
+  api-dev:
+    image: api-dev
+    build:
+      context: .
+      dockerfile: ./api.dockerfile
+    command: ["tactical-api"]
+    environment:
+      API_PORT: ${API_PORT}
+    ports:
+      - "8000:${API_PORT}"
+    volumes:
+      - tactical-data-dev:/opt/tactical
+      - ..:/workspace:cached
+    networks:
+      dev:
+        aliases:
+          - tactical-backend
+
+  app-dev:
+    image: node:12-alpine
+    command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
+    working_dir: /workspace/web
+    volumes:
+      - ..:/workspace:cached
+    ports:
+      - "8080:${APP_PORT}"
+    networks:
+      dev:
+        aliases:
+          - tactical-frontend
+
+  # nats
+  nats-dev:
+    image: ${IMAGE_REPO}tactical-nats:${VERSION}
+    restart: always
+    environment:
+      API_HOST: ${API_HOST}
+      API_PORT: ${API_PORT}
+      DEV: 1
+    ports:
+      - "4222:4222"
+    volumes:
+      - tactical-data-dev:/opt/tactical
+      - ..:/workspace:cached
+    networks:
+      dev:
+        aliases:
+          - ${API_HOST}
+          - tactical-nats
+
+  # meshcentral container
+  meshcentral-dev:
+    image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
+    restart: always
+    environment:
+      MESH_HOST: ${MESH_HOST}
+      MESH_USER: ${MESH_USER}
+      MESH_PASS: ${MESH_PASS}
+      MONGODB_USER: ${MONGODB_USER}
+      MONGODB_PASSWORD: ${MONGODB_PASSWORD}
+      NGINX_HOST_IP: 172.21.0.20
+    networks:
+      dev:
+        aliases:
+          - tactical-meshcentral
+          - ${MESH_HOST}
+    volumes:
+      - tactical-data-dev:/opt/tactical
+      - mesh-data-dev:/home/node/app/meshcentral-data
+    depends_on:
+      - mongodb-dev
+
+  # mongodb container for meshcentral
+  mongodb-dev:
+    image: mongo:4.4
+    restart: always
+    environment:
+      MONGO_INITDB_ROOT_USERNAME: ${MONGODB_USER}
+      MONGO_INITDB_ROOT_PASSWORD: ${MONGODB_PASSWORD}
+      MONGO_INITDB_DATABASE: meshcentral
+    networks:
+      dev:
+        aliases:
+          - tactical-mongodb
+    volumes:
+      - mongo-dev-data:/data/db
+
+  # postgres database for api service
+  postgres-dev:
+    image: postgres:13-alpine
+    restart: always
+    environment:
+      POSTGRES_DB: tacticalrmm
+      POSTGRES_USER: ${POSTGRES_USER}
+      POSTGRES_PASSWORD: ${POSTGRES_PASS}
+    volumes:
+      - postgres-data-dev:/var/lib/postgresql/data
+    networks:
+      dev:
+        aliases:
+          - tactical-postgres
+
+  # redis container for celery tasks
+  redis-dev:
+    restart: always
+    image: redis:6.0-alpine
+    networks:
+      dev:
+        aliases:
+          - tactical-redis
+
+  init-dev:
+    image: api-dev
+    build:
+      context: .
+      dockerfile: ./api.dockerfile
+    restart: on-failure
+    command: ["tactical-init-dev"]
+    environment:
+      POSTGRES_USER: ${POSTGRES_USER}
+      POSTGRES_PASS: ${POSTGRES_PASS}
+      APP_HOST: ${APP_HOST}
+      API_HOST: ${API_HOST}
+      MESH_HOST: ${MESH_HOST}
+      MESH_USER: ${MESH_USER}
+      TRMM_USER: ${TRMM_USER}
+      TRMM_PASS: ${TRMM_PASS}
+      HTTP_PROTOCOL: ${HTTP_PROTOCOL}
+      APP_PORT: ${APP_PORT}
+    depends_on:
+      - postgres-dev
+      - meshcentral-dev
+    networks:
+      - dev
+    volumes:
+      - tactical-data-dev:/opt/tactical
+      - ..:/workspace:cached
+
+  # container for celery worker service
+  celery-dev:
+    image: api-dev
+    build:
+      context: .
+      dockerfile: ./api.dockerfile
+    command: ["tactical-celery-dev"]
+    restart: always
+    networks:
+      - dev
+    volumes:
+      - tactical-data-dev:/opt/tactical
+      - ..:/workspace:cached
+    depends_on:
+      - postgres-dev
+      - redis-dev
+
+  # container for celery beat service
+  celerybeat-dev:
+    image: api-dev
+    build:
+      context: .
+      dockerfile: ./api.dockerfile
+    command: ["tactical-celerybeat-dev"]
+    restart: always
+    networks:
+      - dev
+    volumes:
+      - tactical-data-dev:/opt/tactical
+      - ..:/workspace:cached
+    depends_on:
+      - postgres-dev
+      - redis-dev
+
+  nginx-dev:
+    # container for tactical reverse proxy
+    image: ${IMAGE_REPO}tactical-nginx:${VERSION}
+    restart: always
+    environment:
+      APP_HOST: ${APP_HOST}
+      API_HOST: ${API_HOST}
+      MESH_HOST: ${MESH_HOST}
+      CERT_PUB_KEY: ${CERT_PUB_KEY}
+      CERT_PRIV_KEY: ${CERT_PRIV_KEY}
+      APP_PORT: ${APP_PORT}
+      API_PORT: ${API_PORT}
+    networks:
+      dev:
+        ipv4_address: 172.21.0.20
+    ports:
+      - "80:80"
+      - "443:443"
+    volumes:
+      - tactical-data-dev:/opt/tactical
+
+volumes:
+  tactical-data-dev:
+  postgres-data-dev:
+  mongo-dev-data:
+  mesh-data-dev:
+
+networks:
+  dev:
+    driver: bridge
+    ipam:
+      driver: default
+      config:
+        - subnet: 172.21.0.0/24
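
A minimal bring-up sketch for this stack, assuming the variables are supplied by a .env file created from the .env.example above (the exact invocation is not part of this diff; docker-compose reads a .env file from the directory it is run in):

```bash
# Hypothetical first run, from the .devcontainer directory
cd .devcontainer
cp .env.example .env   # then edit credentials, hostnames, and ports
docker-compose up -d --build
```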
.devcontainer/entrypoint.sh  (new file, 169 additions)
@@ -0,0 +1,169 @@
+#!/usr/bin/env bash
+
+set -e
+
+: "${TRMM_USER:=tactical}"
+: "${TRMM_PASS:=tactical}"
+: "${POSTGRES_HOST:=tactical-postgres}"
+: "${POSTGRES_PORT:=5432}"
+: "${POSTGRES_USER:=tactical}"
+: "${POSTGRES_PASS:=tactical}"
+: "${POSTGRES_DB:=tacticalrmm}"
+: "${MESH_CONTAINER:=tactical-meshcentral}"
+: "${MESH_USER:=meshcentral}"
+: "${MESH_PASS:=meshcentralpass}"
+: "${MESH_HOST:=tactical-meshcentral}"
+: "${API_HOST:=tactical-backend}"
+: "${APP_HOST:=tactical-frontend}"
+: "${REDIS_HOST:=tactical-redis}"
+: "${HTTP_PROTOCOL:=http}"
+: "${APP_PORT:=8080}"
+: "${API_PORT:=8000}"
+
+# Add python venv to path
+export PATH="${VIRTUAL_ENV}/bin:$PATH"
+
+function check_tactical_ready {
+  sleep 15
+  until [ -f "${TACTICAL_READY_FILE}" ]; do
+    echo "waiting for init container to finish install or update..."
+    sleep 10
+  done
+}
+
+function django_setup {
+  until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
+    echo "waiting for postgresql container to be ready..."
+    sleep 5
+  done
+
+  until (echo > /dev/tcp/"${MESH_CONTAINER}"/443) &> /dev/null; do
+    echo "waiting for meshcentral container to be ready..."
+    sleep 5
+  done
+
+  echo "setting up django environment"
+
+  # configure django settings
+  MESH_TOKEN=$(cat ${TACTICAL_DIR}/tmp/mesh_token)
+
+  DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
+
+  localvars="$(cat << EOF
+SECRET_KEY = '${DJANGO_SEKRET}'
+
+DEBUG = True
+
+DOCKER_BUILD = True
+
+CERT_FILE = '/opt/tactical/certs/fullchain.pem'
+KEY_FILE = '/opt/tactical/certs/privkey.pem'
+
+SCRIPTS_DIR = '${WORKSPACE_DIR}/scripts'
+
+ALLOWED_HOSTS = ['${API_HOST}', '*']
+
+ADMIN_URL = 'admin/'
+
+CORS_ORIGIN_ALLOW_ALL = True
+
+DATABASES = {
+    'default': {
+        'ENGINE': 'django.db.backends.postgresql',
+        'NAME': '${POSTGRES_DB}',
+        'USER': '${POSTGRES_USER}',
+        'PASSWORD': '${POSTGRES_PASS}',
+        'HOST': '${POSTGRES_HOST}',
+        'PORT': '${POSTGRES_PORT}',
+    }
+}
+
+REST_FRAMEWORK = {
+    'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',
+
+    'DEFAULT_PERMISSION_CLASSES': (
+        'rest_framework.permissions.IsAuthenticated',
+    ),
+    'DEFAULT_AUTHENTICATION_CLASSES': (
+        'knox.auth.TokenAuthentication',
+    ),
+}
+
+if not DEBUG:
+    REST_FRAMEWORK.update({
+        'DEFAULT_RENDERER_CLASSES': (
+            'rest_framework.renderers.JSONRenderer',
+        )
+    })
+
+MESH_USERNAME = '${MESH_USER}'
+MESH_SITE = 'https://${MESH_HOST}'
+MESH_TOKEN_KEY = '${MESH_TOKEN}'
+REDIS_HOST = '${REDIS_HOST}'
+EOF
+)"
+
+  echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
+
+  # run migrations and init scripts
+  python manage.py migrate --no-input
+  python manage.py collectstatic --no-input
+  python manage.py initial_db_setup
+  python manage.py initial_mesh_setup
+  python manage.py load_chocos
+  python manage.py load_community_scripts
+  python manage.py reload_nats
+
+  # create super user
+  echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
+
+}
+
+if [ "$1" = 'tactical-init-dev' ]; then
+
+  # make directories if they don't exist
+  mkdir -p ${TACTICAL_DIR}/tmp
+
+  test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"
+
+  # setup Python virtual env and install dependencies
+  test -f ${VIRTUAL_ENV} && python -m venv --copies ${VIRTUAL_ENV}
+  pip install --no-cache-dir -r /requirements.txt
+
+  django_setup
+
+  # create .env file for frontend
+  webenv="$(cat << EOF
+PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}"
+DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}"
+APP_URL = https://${APP_HOST}
+EOF
+)"
+  echo "${webenv}" | tee ${WORKSPACE_DIR}/web/.env > /dev/null
+
+  # chown everything to tactical user
+  chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
+  chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${TACTICAL_DIR}"
+
+  # create install ready file
+  su -c "echo 'tactical-init' > ${TACTICAL_READY_FILE}" "${TACTICAL_USER}"
+fi
+
+if [ "$1" = 'tactical-api' ]; then
+  cp ${WORKSPACE_DIR}/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
+  chmod +x /usr/local/bin/goversioninfo
+
+  check_tactical_ready
+  python manage.py runserver 0.0.0.0:${API_PORT}
+fi
+
+if [ "$1" = 'tactical-celery-dev' ]; then
+  check_tactical_ready
+  env/bin/celery -A tacticalrmm worker -l debug
+fi
+
+if [ "$1" = 'tactical-celerybeat-dev' ]; then
+  check_tactical_ready
+  test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
+  env/bin/celery -A tacticalrmm beat -l debug
+fi
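
The `: "${VAR:=default}"` lines at the top of this script are plain bash parameter expansion: `:` is a no-op command, and `${VAR:=default}` assigns the default only when the variable is unset or empty, so values injected by docker-compose take precedence. The `echo > /dev/tcp/host/port` loops later in the script use bash's built-in /dev/tcp redirection to poll a TCP port without needing netcat. A self-contained illustration of the default-assignment idiom:

```bash
#!/usr/bin/env bash
unset GREETING
: "${GREETING:=hello}"   # GREETING was unset, so the default is assigned
echo "$GREETING"         # prints: hello

GREETING=hi
: "${GREETING:=hello}"   # GREETING already has a value, nothing changes
echo "$GREETING"         # prints: hi
```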
.devcontainer/requirements.txt  (new file, 44 additions)
@@ -0,0 +1,44 @@
+# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
+amqp==2.6.1
+asgiref==3.3.1
+asyncio-nats-client==0.11.4
+billiard==3.6.3.0
+celery==4.4.6
+certifi==2020.12.5
+cffi==1.14.3
+chardet==3.0.4
+cryptography==3.2.1
+decorator==4.4.2
+Django==3.1.4
+django-cors-headers==3.5.0
+django-rest-knox==4.1.0
+djangorestframework==3.12.2
+future==0.18.2
+idna==2.10
+kombu==4.6.11
+loguru==0.5.3
+msgpack==1.0.0
+packaging==20.4
+psycopg2-binary==2.8.6
+pycparser==2.20
+pycryptodome==3.9.9
+pyotp==2.4.1
+pyparsing==2.4.7
+pytz==2020.4
+qrcode==6.1
+redis==3.5.3
+requests==2.25.0
+six==1.15.0
+sqlparse==0.4.1
+twilio==6.49.0
+urllib3==1.26.2
+validators==0.18.1
+vine==1.3.0
+websockets==8.1
+zipp==3.4.0
+black
+Werkzeug
+django-extensions
+coverage
+coveralls
+model_bakery
@@ -1,5 +1,25 @@
-.git
-.cache
-**/*.env
-**/env
+**/__pycache__
+**/.classpath
+**/.dockerignore
+**/.env
+**/.git
+**/.gitignore
+**/.project
+**/.settings
+**/.toolstarget
+**/.vs
+**/.vscode
+**/*.*proj.user
+**/*.dbmdl
+**/*.jfm
+**/azds.yaml
+**/charts
+**/docker-compose*
+**/Dockerfile*
 **/node_modules
+**/npm-debug.log
+**/obj
+**/secrets.dev.yaml
+**/values.dev.yaml
+**/env
+README.md
.github/FUNDING.yml  (vendored, new file, 12 additions)
@@ -0,0 +1,12 @@
+# These are supported funding model platforms
+
+github: wh1te909
+patreon: # Replace with a single Patreon username
+open_collective: # Replace with a single Open Collective username
+ko_fi: # Replace with a single Ko-fi username
+tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
+community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
+liberapay: # Replace with a single Liberapay username
+issuehunt: # Replace with a single IssueHunt username
+otechie: # Replace with a single Otechie username
+custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
.github/workflows/docker-build-push.yml  (vendored, 10 lines changed)
@@ -57,16 +57,6 @@ jobs:
           platforms: linux/amd64
           tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest
-
-      - name: Build and Push Tactical Salt Image
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          push: true
-          pull: true
-          file: ./docker/containers/tactical-salt/dockerfile
-          platforms: linux/amd64
-          tags: tacticalrmm/tactical-salt:${{ steps.prep.outputs.version }},tacticalrmm/tactical-salt:latest
 
       - name: Build and Push Tactical Frontend Image
         uses: docker/build-push-action@v2
         with:
.vscode/launch.json  (vendored, 14 lines changed)
@@ -14,6 +14,20 @@
                 "0.0.0.0:8000"
             ],
             "django": true
+        },
+        {
+            "name": "Django: Docker Remote Attach",
+            "type": "python",
+            "request": "attach",
+            "port": 5678,
+            "host": "localhost",
+            "preLaunchTask": "docker debug",
+            "pathMappings": [
+                {
+                    "localRoot": "${workspaceFolder}/api/tacticalrmm",
+                    "remoteRoot": "/workspace/api/tacticalrmm"
+                }
+            ]
         }
     ]
 }
.vscode/settings.json  (vendored, 19 lines changed)
@@ -41,4 +41,23 @@
            "**/*.zip": true
        },
    },
+    "go.useLanguageServer": true,
+    "[go]": {
+        "editor.formatOnSave": true,
+        "editor.codeActionsOnSave": {
+            "source.organizeImports": false,
+        },
+        "editor.snippetSuggestions": "none",
+    },
+    "[go.mod]": {
+        "editor.formatOnSave": true,
+        "editor.codeActionsOnSave": {
+            "source.organizeImports": true,
+        },
+    },
+    "gopls": {
+        "usePlaceholders": true,
+        "completeUnimported": true,
+        "staticcheck": true,
+    }
 }
.vscode/tasks.json  (vendored, new file, 23 additions)
@@ -0,0 +1,23 @@
+{
+    // See https://go.microsoft.com/fwlink/?LinkId=733558
+    // for the documentation about the tasks.json format
+    "version": "2.0.0",
+    "tasks": [
+        {
+            "label": "docker debug",
+            "type": "shell",
+            "command": "docker-compose",
+            "args": [
+                "-p",
+                "trmm",
+                "-f",
+                ".devcontainer/docker-compose.yml",
+                "-f",
+                ".devcontainer/docker-compose.debug.yml",
+                "up",
+                "-d",
+                "--build"
+            ]
+        }
+    ]
+}
@@ -6,7 +6,7 @@
 [](https://github.com/python/black)
 
 Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
-It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang, as well as the [SaltStack](https://github.com/saltstack/salt) api and [MeshCentral](https://github.com/Ylianst/MeshCentral)
+It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
 
 # [LIVE DEMO](https://rmm.xlawgaming.com/)
 Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
@@ -36,7 +36,7 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
 ## Installation
 
 ### Requirements
-- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04)
+- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04 / Debian 10)
 - A domain you own with at least 3 subdomains
 - Google Authenticator app (2 factor is NOT optional)
 
@@ -62,7 +62,6 @@ sudo ufw default allow outgoing
 sudo ufw allow ssh
 sudo ufw allow http
 sudo ufw allow https
-sudo ufw allow proto tcp from any to any port 4505,4506
 sudo ufw allow proto tcp from any to any port 4222
 sudo ufw enable && sudo ufw reload
 ```
@@ -1,457 +0,0 @@
-from __future__ import absolute_import
-import psutil
-import os
-import datetime
-import zlib
-import json
-import base64
-import wmi
-import win32evtlog
-import win32con
-import win32evtlogutil
-import winerror
-from time import sleep
-import requests
-import subprocess
-import random
-import platform
-
-ARCH = "64" if platform.machine().endswith("64") else "32"
-PROGRAM_DIR = os.path.join(os.environ["ProgramFiles"], "TacticalAgent")
-TAC_RMM = os.path.join(PROGRAM_DIR, "tacticalrmm.exe")
-NSSM = os.path.join(PROGRAM_DIR, "nssm.exe" if ARCH == "64" else "nssm-x86.exe")
-TEMP_DIR = os.path.join(os.environ["WINDIR"], "Temp")
-SYS_DRIVE = os.environ["SystemDrive"]
-PY_BIN = os.path.join(SYS_DRIVE, "\\salt", "bin", "python.exe")
-SALT_CALL = os.path.join(SYS_DRIVE, "\\salt", "salt-call.bat")
-
-
-def get_services():
-    # see https://github.com/wh1te909/tacticalrmm/issues/38
-    # for why I am manually implementing the svc.as_dict() method of psutil
-    ret = []
-    for svc in psutil.win_service_iter():
-        i = {}
-        try:
-            i["display_name"] = svc.display_name()
-            i["binpath"] = svc.binpath()
-            i["username"] = svc.username()
-            i["start_type"] = svc.start_type()
-            i["status"] = svc.status()
-            i["pid"] = svc.pid()
-            i["name"] = svc.name()
-            i["description"] = svc.description()
-        except Exception:
-            continue
-        else:
-            ret.append(i)
-
-    return ret
-
-
-def run_python_script(filename, timeout, script_type="userdefined"):
-    # no longer used in agent version 0.11.0
-    file_path = os.path.join(TEMP_DIR, filename)
-
-    if os.path.exists(file_path):
-        try:
-            os.remove(file_path)
-        except:
-            pass
-
-    if script_type == "userdefined":
-        __salt__["cp.get_file"](f"salt://scripts/userdefined/{filename}", file_path)
-    else:
-        __salt__["cp.get_file"](f"salt://scripts/{filename}", file_path)
-
-    return __salt__["cmd.run_all"](f"{PY_BIN} {file_path}", timeout=timeout)
-
-
-def run_script(filepath, filename, shell, timeout, args=[], bg=False):
-    if shell == "powershell" or shell == "cmd":
-        if args:
-            return __salt__["cmd.script"](
-                source=filepath,
-                args=" ".join(map(lambda x: f'"{x}"', args)),
-                shell=shell,
-                timeout=timeout,
-                bg=bg,
-            )
-        else:
-            return __salt__["cmd.script"](
-                source=filepath, shell=shell, timeout=timeout, bg=bg
-            )
-
-    elif shell == "python":
-        file_path = os.path.join(TEMP_DIR, filename)
-
-        if os.path.exists(file_path):
-            try:
-                os.remove(file_path)
-            except:
-                pass
-
-        __salt__["cp.get_file"](filepath, file_path)
-
-        salt_cmd = "cmd.run_bg" if bg else "cmd.run_all"
-
-        if args:
-            a = " ".join(map(lambda x: f'"{x}"', args))
-            cmd = f"{PY_BIN} {file_path} {a}"
-            return __salt__[salt_cmd](cmd, timeout=timeout)
-        else:
-            return __salt__[salt_cmd](f"{PY_BIN} {file_path}", timeout=timeout)
-
-
-def uninstall_agent():
-    remove_exe = os.path.join(PROGRAM_DIR, "unins000.exe")
-    __salt__["cmd.run_bg"]([remove_exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"])
-    return "ok"
-
-
-def update_salt():
-    for p in psutil.process_iter():
-        with p.oneshot():
-            if p.name() == "tacticalrmm.exe" and "updatesalt" in p.cmdline():
-                return "running"
-
-    from subprocess import Popen, PIPE
-
-    CREATE_NEW_PROCESS_GROUP = 0x00000200
-    DETACHED_PROCESS = 0x00000008
-    cmd = [TAC_RMM, "-m", "updatesalt"]
-    p = Popen(
-        cmd,
-        stdin=PIPE,
-        stdout=PIPE,
-        stderr=PIPE,
-        close_fds=True,
-        creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
-    )
-    return p.pid
-
-
-def run_manual_checks():
-    __salt__["cmd.run_bg"]([TAC_RMM, "-m", "runchecks"])
-    return "ok"
-
-
-def install_updates():
-    for p in psutil.process_iter():
-        with p.oneshot():
-            if p.name() == "tacticalrmm.exe" and "winupdater" in p.cmdline():
-                return "running"
-
-    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "winupdater"])
-
-
-def _wait_for_service(svc, status, retries=10):
-    attempts = 0
-    while 1:
-        try:
-            service = psutil.win_service_get(svc)
-        except psutil.NoSuchProcess:
-            stat = "fail"
-            attempts += 1
-            sleep(5)
-        else:
-            stat = service.status()
-            if stat != status:
-                attempts += 1
-                sleep(5)
-            else:
-                attempts = 0
-
-        if attempts == 0 or attempts > retries:
-            break
-
-    return stat
-
-
-def agent_update_v2(inno, url):
-    # make sure another instance of the update is not running
-    # this function spawns 2 instances of itself (because we call it twice with salt run_bg)
-    # so if more than 2 running, don't continue as an update is already running
-    count = 0
-    for p in psutil.process_iter():
-        try:
-            with p.oneshot():
-                if "win_agent.agent_update_v2" in p.cmdline():
-                    count += 1
-        except Exception:
-            continue
-
-    if count > 2:
-        return "already running"
-
-    sleep(random.randint(1, 20))  # don't flood the rmm
-
-    exe = os.path.join(TEMP_DIR, inno)
-
-    if os.path.exists(exe):
-        try:
-            os.remove(exe)
-        except:
-            pass
-
-    try:
-        r = requests.get(url, stream=True, timeout=600)
-    except Exception:
-        return "failed"
-
-    if r.status_code != 200:
-        return "failed"
-
-    with open(exe, "wb") as f:
-        for chunk in r.iter_content(chunk_size=1024):
-            if chunk:
-                f.write(chunk)
-    del r
-
-    ret = subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=120)
-
-    tac = _wait_for_service(svc="tacticalagent", status="running")
-    if tac != "running":
-        subprocess.run([NSSM, "start", "tacticalagent"], timeout=30)
-
-    chk = _wait_for_service(svc="checkrunner", status="running")
-    if chk != "running":
-        subprocess.run([NSSM, "start", "checkrunner"], timeout=30)
-
-    return "ok"
-
-
-def do_agent_update_v2(inno, url):
-    return __salt__["cmd.run_bg"](
-        [
-            SALT_CALL,
-            "win_agent.agent_update_v2",
-            f"inno={inno}",
-            f"url={url}",
-            "--local",
-        ]
-    )
-
-
-def agent_update(version, url):
-    # make sure another instance of the update is not running
-    # this function spawns 2 instances of itself so if more than 2 running,
-    # don't continue as an update is already running
-    count = 0
-    for p in psutil.process_iter():
-        try:
-            with p.oneshot():
-                if "win_agent.agent_update" in p.cmdline():
-                    count += 1
-        except Exception:
-            continue
-
-    if count > 2:
-        return "already running"
-
-    sleep(random.randint(1, 60))  # don't flood the rmm
-    try:
-        r = requests.get(url, stream=True, timeout=600)
-    except Exception:
-        return "failed"
-
-    if r.status_code != 200:
-        return "failed"
-
-    exe = os.path.join(TEMP_DIR, f"winagent-v{version}.exe")
-
-    with open(exe, "wb") as f:
-        for chunk in r.iter_content(chunk_size=1024):
-            if chunk:
-                f.write(chunk)
-    del r
-
-    services = ("tacticalagent", "checkrunner")
-
-    for svc in services:
-        subprocess.run([NSSM, "stop", svc], timeout=120)
-
-    sleep(10)
-    r = subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=300)
-    sleep(30)
-
-    for svc in services:
-        subprocess.run([NSSM, "start", svc], timeout=120)
-
-    return "ok"
-
-
-def do_agent_update(version, url):
-    return __salt__["cmd.run_bg"](
-        [
-            SALT_CALL,
-            "win_agent.agent_update",
-            f"version={version}",
-            f"url={url}",
-            "--local",
-        ]
-    )
-
-
-class SystemDetail:
-    def __init__(self):
-        self.c = wmi.WMI()
-        self.comp_sys_prod = self.c.Win32_ComputerSystemProduct()
-        self.comp_sys = self.c.Win32_ComputerSystem()
-        self.memory = self.c.Win32_PhysicalMemory()
-        self.os = self.c.Win32_OperatingSystem()
-        self.base_board = self.c.Win32_BaseBoard()
-        self.bios = self.c.Win32_BIOS()
-        self.disk = self.c.Win32_DiskDrive()
-        self.network_adapter = self.c.Win32_NetworkAdapter()
-        self.network_config = self.c.Win32_NetworkAdapterConfiguration()
-        self.desktop_monitor = self.c.Win32_DesktopMonitor()
-        self.cpu = self.c.Win32_Processor()
-        self.usb = self.c.Win32_USBController()
-
-    def get_all(self, obj):
-        ret = []
-        for i in obj:
-            tmp = [
-                {j: getattr(i, j)}
-                for j in list(i.properties)
-                if getattr(i, j) is not None
-            ]
-            ret.append(tmp)
-
-        return ret
-
-
-def system_info():
-    info = SystemDetail()
-    return {
-        "comp_sys_prod": info.get_all(info.comp_sys_prod),
-        "comp_sys": info.get_all(info.comp_sys),
-        "mem": info.get_all(info.memory),
-        "os": info.get_all(info.os),
-        "base_board": info.get_all(info.base_board),
-        "bios": info.get_all(info.bios),
-        "disk": info.get_all(info.disk),
-        "network_adapter": info.get_all(info.network_adapter),
-        "network_config": info.get_all(info.network_config),
-        "desktop_monitor": info.get_all(info.desktop_monitor),
-        "cpu": info.get_all(info.cpu),
-        "usb": info.get_all(info.usb),
-    }
-
-
-def local_sys_info():
-    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "sysinfo"])
-
-
-def get_procs():
-    ret = []
-
-    # setup
-    for proc in psutil.process_iter():
-        with proc.oneshot():
-            proc.cpu_percent(interval=None)
-
-    # need time for psutil to record cpu percent
-    sleep(1)
-
-    for c, proc in enumerate(psutil.process_iter(), 1):
-        x = {}
-        with proc.oneshot():
-            if proc.pid == 0 or not proc.name():
-                continue
-
-            x["name"] = proc.name()
-            x["cpu_percent"] = proc.cpu_percent(interval=None) / psutil.cpu_count()
-            x["memory_percent"] = proc.memory_percent()
-            x["pid"] = proc.pid
-            x["ppid"] = proc.ppid()
-            x["status"] = proc.status()
-            x["username"] = proc.username()
-            x["id"] = c
-
-        ret.append(x)
-
-    return ret
-
-
-def _compress_json(j):
-    return {
-        "wineventlog": base64.b64encode(
-            zlib.compress(json.dumps(j).encode("utf-8", errors="ignore"))
-        ).decode("ascii", errors="ignore")
-    }
-
-
-def get_eventlog(logtype, last_n_days):
-
-    start_time = datetime.datetime.now() - datetime.timedelta(days=last_n_days)
-    flags = win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ
-
-    status_dict = {
-        win32con.EVENTLOG_AUDIT_FAILURE: "AUDIT_FAILURE",
-        win32con.EVENTLOG_AUDIT_SUCCESS: "AUDIT_SUCCESS",
-        win32con.EVENTLOG_INFORMATION_TYPE: "INFO",
-        win32con.EVENTLOG_WARNING_TYPE: "WARNING",
-        win32con.EVENTLOG_ERROR_TYPE: "ERROR",
-        0: "INFO",
-    }
-
-    computer = "localhost"
-    hand = win32evtlog.OpenEventLog(computer, logtype)
-    total = win32evtlog.GetNumberOfEventLogRecords(hand)
-    log = []
-    uid = 0
-    done = False
-
-    try:
-        while 1:
-            events = win32evtlog.ReadEventLog(hand, flags, 0)
-            for ev_obj in events:
-
-                uid += 1
-                # return once total number of events reach or we'll be stuck in an infinite loop
-                if uid >= total:
-                    done = True
-                    break
-
-                the_time = ev_obj.TimeGenerated.Format()
-                time_obj = datetime.datetime.strptime(the_time, "%c")
-                if time_obj < start_time:
-                    done = True
-                    break
-
-                computer = str(ev_obj.ComputerName)
-                src = str(ev_obj.SourceName)
-                evt_type = str(status_dict[ev_obj.EventType])
-                evt_id = str(winerror.HRESULT_CODE(ev_obj.EventID))
-                evt_category = str(ev_obj.EventCategory)
-                record = str(ev_obj.RecordNumber)
-                msg = (
-                    str(win32evtlogutil.SafeFormatMessage(ev_obj, logtype))
-                    .replace("<", "")
-                    .replace(">", "")
-                )
-
-                event_dict = {
-                    "computer": computer,
-                    "source": src,
-                    "eventType": evt_type,
-                    "eventID": evt_id,
-                    "eventCategory": evt_category,
-                    "message": msg,
-                    "time": the_time,
-                    "record": record,
-                    "uid": uid,
-                }
-
-                log.append(event_dict)
-
-            if done:
-                break
-
-    except Exception:
-        pass
-
-    win32evtlog.CloseEventLog(hand)
-    return _compress_json(log)
@@ -6,28 +6,28 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('accounts', '0002_auto_20200810_0544'),
+        ("accounts", "0002_auto_20200810_0544"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='user',
-            name='created_by',
+            model_name="user",
+            name="created_by",
             field=models.CharField(blank=True, max_length=100, null=True),
         ),
         migrations.AddField(
-            model_name='user',
-            name='created_time',
+            model_name="user",
+            name="created_time",
            field=models.DateTimeField(auto_now_add=True, null=True),
         ),
         migrations.AddField(
-            model_name='user',
-            name='modified_by',
+            model_name="user",
+            name="modified_by",
            field=models.CharField(blank=True, max_length=100, null=True),
         ),
         migrations.AddField(
-            model_name='user',
-            name='modified_time',
+            model_name="user",
+            name="modified_time",
            field=models.DateTimeField(auto_now=True, null=True),
         ),
     ]
@@ -6,24 +6,24 @@ from django.db import migrations
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('accounts', '0003_auto_20200922_1344'),
+        ("accounts", "0003_auto_20200922_1344"),
     ]
 
     operations = [
         migrations.RemoveField(
-            model_name='user',
-            name='created_by',
+            model_name="user",
+            name="created_by",
         ),
         migrations.RemoveField(
-            model_name='user',
-            name='created_time',
+            model_name="user",
+            name="created_time",
         ),
         migrations.RemoveField(
-            model_name='user',
-            name='modified_by',
+            model_name="user",
+            name="modified_by",
         ),
         migrations.RemoveField(
-            model_name='user',
-            name='modified_time',
+            model_name="user",
+            name="modified_time",
         ),
     ]
@@ -6,28 +6,28 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('accounts', '0004_auto_20201002_1257'),
+        ("accounts", "0004_auto_20201002_1257"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='user',
-            name='created_by',
+            model_name="user",
+            name="created_by",
            field=models.CharField(blank=True, max_length=100, null=True),
         ),
         migrations.AddField(
-            model_name='user',
-            name='created_time',
+            model_name="user",
+            name="created_time",
            field=models.DateTimeField(auto_now_add=True, null=True),
         ),
         migrations.AddField(
-            model_name='user',
-            name='modified_by',
+            model_name="user",
+            name="modified_by",
            field=models.CharField(blank=True, max_length=100, null=True),
         ),
         migrations.AddField(
-            model_name='user',
-            name='modified_time',
+            model_name="user",
+            name="modified_time",
            field=models.DateTimeField(auto_now=True, null=True),
         ),
     ]
@@ -6,13 +6,13 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('accounts', '0007_update_agent_primary_key'),
+        ("accounts", "0007_update_agent_primary_key"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='user',
-            name='dark_mode',
+            model_name="user",
+            name="dark_mode",
            field=models.BooleanField(default=True),
         ),
     ]
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.4 on 2020-12-10 17:00
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("accounts", "0008_user_dark_mode"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="user",
+            name="show_community_scripts",
+            field=models.BooleanField(default=True),
+        ),
+    ]
@@ -0,0 +1,26 @@
+# Generated by Django 3.1.4 on 2021-01-14 01:23
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("accounts", "0009_user_show_community_scripts"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="user",
+            name="agent_dblclick_action",
+            field=models.CharField(
+                choices=[
+                    ("editagent", "Edit Agent"),
+                    ("takecontrol", "Take Control"),
+                    ("remotebg", "Remote Background"),
+                ],
+                default="editagent",
+                max_length=50,
+            ),
+        ),
+    ]
@@ -0,0 +1,26 @@
+# Generated by Django 3.1.5 on 2021-01-18 09:40
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("accounts", "0010_user_agent_dblclick_action"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="user",
+            name="default_agent_tbl_tab",
+            field=models.CharField(
+                choices=[
+                    ("server", "Servers"),
+                    ("workstation", "Workstations"),
+                    ("mixed", "Mixed"),
+                ],
+                default="server",
+                max_length=50,
+            ),
+        ),
+    ]
@@ -3,11 +3,30 @@ from django.contrib.auth.models import AbstractUser
 
 from logs.models import BaseAuditModel
 
+AGENT_DBLCLICK_CHOICES = [
+    ("editagent", "Edit Agent"),
+    ("takecontrol", "Take Control"),
+    ("remotebg", "Remote Background"),
+]
+
+AGENT_TBL_TAB_CHOICES = [
+    ("server", "Servers"),
+    ("workstation", "Workstations"),
+    ("mixed", "Mixed"),
+]
+
+
 class User(AbstractUser, BaseAuditModel):
     is_active = models.BooleanField(default=True)
     totp_key = models.CharField(max_length=50, null=True, blank=True)
     dark_mode = models.BooleanField(default=True)
+    show_community_scripts = models.BooleanField(default=True)
+    agent_dblclick_action = models.CharField(
+        max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent"
+    )
+    default_agent_tbl_tab = models.CharField(
+        max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server"
+    )
 
     agent = models.OneToOneField(
         "agents.Agent",
@@ -155,6 +155,33 @@ class GetUpdateDeleteUser(TacticalTestCase):
 
         self.check_not_authenticated("put", url)
 
+    @override_settings(ROOT_USER="john")
+    def test_put_root_user(self):
+        url = f"/accounts/{self.john.pk}/users/"
+        data = {
+            "id": self.john.pk,
+            "username": "john",
+            "email": "johndoe@xlawgaming.com",
+            "first_name": "John",
+            "last_name": "Doe",
+        }
+        r = self.client.put(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
+    @override_settings(ROOT_USER="john")
+    def test_put_not_root_user(self):
+        url = f"/accounts/{self.john.pk}/users/"
+        data = {
+            "id": self.john.pk,
+            "username": "john",
+            "email": "johndoe@xlawgaming.com",
+            "first_name": "John",
+            "last_name": "Doe",
+        }
+        self.client.force_authenticate(user=self.alice)
+        r = self.client.put(url, data, format="json")
+        self.assertEqual(r.status_code, 400)
+
     def test_delete(self):
         url = f"/accounts/{self.john.pk}/users/"
         r = self.client.delete(url)
@@ -166,6 +193,19 @@ class GetUpdateDeleteUser(TacticalTestCase):
 
         self.check_not_authenticated("delete", url)
 
+    @override_settings(ROOT_USER="john")
+    def test_delete_root_user(self):
+        url = f"/accounts/{self.john.pk}/users/"
+        r = self.client.delete(url)
+        self.assertEqual(r.status_code, 200)
+
+    @override_settings(ROOT_USER="john")
+    def test_delete_non_root_user(self):
+        url = f"/accounts/{self.john.pk}/users/"
+        self.client.force_authenticate(user=self.alice)
+        r = self.client.delete(url)
+        self.assertEqual(r.status_code, 400)
+
+
 class TestUserAction(TacticalTestCase):
     def setUp(self):
@@ -184,6 +224,21 @@ class TestUserAction(TacticalTestCase):
 
         self.check_not_authenticated("post", url)
 
+    @override_settings(ROOT_USER="john")
+    def test_post_root_user(self):
+        url = "/accounts/users/reset/"
+        data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
+        r = self.client.post(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
+    @override_settings(ROOT_USER="john")
+    def test_post_non_root_user(self):
+        url = "/accounts/users/reset/"
+        data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
+        self.client.force_authenticate(user=self.alice)
+        r = self.client.post(url, data, format="json")
+        self.assertEqual(r.status_code, 400)
+
     def test_put(self):
         url = "/accounts/users/reset/"
         data = {"id": self.john.pk}
@@ -195,12 +250,46 @@ class TestUserAction(TacticalTestCase):
 
         self.check_not_authenticated("put", url)
 
-    def test_darkmode(self):
+    @override_settings(ROOT_USER="john")
+    def test_put_root_user(self):
+        url = "/accounts/users/reset/"
+        data = {"id": self.john.pk}
+        r = self.client.put(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        user = User.objects.get(pk=self.john.pk)
+        self.assertEqual(user.totp_key, "")
+
+    @override_settings(ROOT_USER="john")
+    def test_put_non_root_user(self):
+        url = "/accounts/users/reset/"
+        data = {"id": self.john.pk}
+        self.client.force_authenticate(user=self.alice)
+        r = self.client.put(url, data, format="json")
+        self.assertEqual(r.status_code, 400)
+
+    def test_user_ui(self):
         url = "/accounts/users/ui/"
         data = {"dark_mode": False}
         r = self.client.patch(url, data, format="json")
         self.assertEqual(r.status_code, 200)
 
+        data = {"show_community_scripts": True}
+        r = self.client.patch(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        data = {"agent_dblclick_action": "editagent"}
+        r = self.client.patch(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        data = {"agent_dblclick_action": "remotebg"}
+        r = self.client.patch(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        data = {"agent_dblclick_action": "takecontrol"}
+        r = self.client.patch(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
         self.check_not_authenticated("patch", url)
@@ -60,7 +60,7 @@ class LoginView(KnoxLoginView):
         if settings.DEBUG and token == "sekret":
             valid = True
-        elif totp.verify(token, valid_window=1):
+        elif totp.verify(token, valid_window=10):
             valid = True

         if valid:
@@ -108,6 +108,13 @@ class GetUpdateDeleteUser(APIView):
     def put(self, request, pk):
         user = get_object_or_404(User, pk=pk)

+        if (
+            hasattr(settings, "ROOT_USER")
+            and request.user != user
+            and user.username == settings.ROOT_USER
+        ):
+            return notify_error("The root user cannot be modified from the UI")
+
         serializer = UserSerializer(instance=user, data=request.data, partial=True)
         serializer.is_valid(raise_exception=True)
         serializer.save()
@@ -115,7 +122,15 @@ class GetUpdateDeleteUser(APIView):
         return Response("ok")

     def delete(self, request, pk):
-        get_object_or_404(User, pk=pk).delete()
+        user = get_object_or_404(User, pk=pk)
+        if (
+            hasattr(settings, "ROOT_USER")
+            and request.user != user
+            and user.username == settings.ROOT_USER
+        ):
+            return notify_error("The root user cannot be deleted from the UI")
+
+        user.delete()

         return Response("ok")
@@ -124,8 +139,14 @@ class UserActions(APIView):

     # reset password
     def post(self, request):

         user = get_object_or_404(User, pk=request.data["id"])
+        if (
+            hasattr(settings, "ROOT_USER")
+            and request.user != user
+            and user.username == settings.ROOT_USER
+        ):
+            return notify_error("The root user cannot be modified from the UI")
+
         user.set_password(request.data["password"])
         user.save()

@@ -133,8 +154,14 @@ class UserActions(APIView):

     # reset two factor token
     def put(self, request):

         user = get_object_or_404(User, pk=request.data["id"])
+        if (
+            hasattr(settings, "ROOT_USER")
+            and request.user != user
+            and user.username == settings.ROOT_USER
+        ):
+            return notify_error("The root user cannot be modified from the UI")
+
         user.totp_key = ""
         user.save()

@@ -161,6 +188,18 @@ class TOTPSetup(APIView):
 class UserUI(APIView):
     def patch(self, request):
         user = request.user
-        user.dark_mode = request.data["dark_mode"]
-        user.save(update_fields=["dark_mode"])
+
+        if "dark_mode" in request.data.keys():
+            user.dark_mode = request.data["dark_mode"]
+            user.save(update_fields=["dark_mode"])
+
+        if "show_community_scripts" in request.data.keys():
+            user.show_community_scripts = request.data["show_community_scripts"]
+            user.save(update_fields=["show_community_scripts"])
+
+        if "userui" in request.data.keys():
+            user.agent_dblclick_action = request.data["agent_dblclick_action"]
+            user.default_agent_tbl_tab = request.data["default_agent_tbl_tab"]
+            user.save(update_fields=["agent_dblclick_action", "default_agent_tbl_tab"])
+
         return Response("ok")
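Note: the same three-condition root-user guard now appears in four of the views above (user update, user delete, password reset, 2FA reset). A minimal runnable sketch of that predicate, factored out for illustration only; this helper is hypothetical and not part of the diff:

    from typing import Optional

    def touches_root_user(request_user: str, target: str, root_user: Optional[str]) -> bool:
        # True when a root user is configured and someone other than the root
        # account itself is trying to modify or delete it
        return (
            root_user is not None
            and request_user != target
            and target == root_user
        )

    assert touches_root_user("alice", "john", "john") is True   # blocked
    assert touches_root_user("john", "john", "john") is False   # root may edit itself
    assert touches_root_user("alice", "bob", "john") is False   # non-root target

In the views, `hasattr(settings, "ROOT_USER")` plays the role of the `root_user is not None` check.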
@@ -26,7 +26,7 @@ def get_wmi_data():
 agent = Recipe(
     Agent,
     hostname="DESKTOP-TEST123",
-    version="1.1.1",
+    version="1.3.0",
     monitoring_type=cycle(["workstation", "server"]),
     salt_id=generate_agent_id("DESKTOP-TEST123"),
     agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",
@@ -7,14 +7,20 @@ import django.db.models.deletion
 class Migration(migrations.Migration):

     dependencies = [
-        ('clients', '0006_deployment'),
-        ('agents', '0020_auto_20201025_2129'),
+        ("clients", "0006_deployment"),
+        ("agents", "0020_auto_20201025_2129"),
     ]

     operations = [
         migrations.AddField(
-            model_name='agent',
-            name='site_link',
-            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='agents', to='clients.site'),
+            model_name="agent",
+            name="site_link",
+            field=models.ForeignKey(
+                blank=True,
+                null=True,
+                on_delete=django.db.models.deletion.SET_NULL,
+                related_name="agents",
+                to="clients.site",
+            ),
         ),
     ]
@@ -6,16 +6,16 @@ from django.db import migrations
 class Migration(migrations.Migration):

     dependencies = [
-        ('agents', '0022_update_site_primary_key'),
+        ("agents", "0022_update_site_primary_key"),
     ]

     operations = [
         migrations.RemoveField(
-            model_name='agent',
-            name='client',
+            model_name="agent",
+            name="client",
         ),
         migrations.RemoveField(
-            model_name='agent',
-            name='site',
+            model_name="agent",
+            name="site",
         ),
     ]
@@ -6,13 +6,13 @@ from django.db import migrations
 class Migration(migrations.Migration):

     dependencies = [
-        ('agents', '0023_auto_20201101_2312'),
+        ("agents", "0023_auto_20201101_2312"),
     ]

     operations = [
         migrations.RenameField(
-            model_name='agent',
-            old_name='site_link',
-            new_name='site',
+            model_name="agent",
+            old_name="site_link",
+            new_name="site",
         ),
     ]
@@ -6,13 +6,22 @@ from django.db import migrations, models
 class Migration(migrations.Migration):

     dependencies = [
-        ('agents', '0024_auto_20201101_2319'),
+        ("agents", "0024_auto_20201101_2319"),
     ]

     operations = [
         migrations.AlterField(
-            model_name='recoveryaction',
-            name='mode',
-            field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC')], default='mesh', max_length=50),
+            model_name="recoveryaction",
+            name="mode",
+            field=models.CharField(
+                choices=[
+                    ("salt", "Salt"),
+                    ("mesh", "Mesh"),
+                    ("command", "Command"),
+                    ("rpc", "Nats RPC"),
+                ],
+                default="mesh",
+                max_length=50,
+            ),
         ),
     ]
@@ -6,13 +6,23 @@ from django.db import migrations, models
 class Migration(migrations.Migration):

     dependencies = [
-        ('agents', '0025_auto_20201122_0407'),
+        ("agents", "0025_auto_20201122_0407"),
     ]

     operations = [
         migrations.AlterField(
-            model_name='recoveryaction',
-            name='mode',
-            field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC'), ('checkrunner', 'Checkrunner')], default='mesh', max_length=50),
+            model_name="recoveryaction",
+            name="mode",
+            field=models.CharField(
+                choices=[
+                    ("salt", "Salt"),
+                    ("mesh", "Mesh"),
+                    ("command", "Command"),
+                    ("rpc", "Nats RPC"),
+                    ("checkrunner", "Checkrunner"),
+                ],
+                default="mesh",
+                max_length=50,
+            ),
         ),
     ]
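Note: the five agents migrations above (and the alerts migration further down) change formatting only: single quotes become double quotes and long calls are exploded one argument per line, which matches Black's output. Something like the following reproduces the quote normalization (illustrative sketch, assuming the black package is installed):

    import black

    src = "x = models.CharField(choices=[('salt', 'Salt')], default='mesh', max_length=50)\n"
    # prints the same statement with double quotes; lines over the length
    # limit would additionally be exploded one argument per line
    print(black.format_str(src, mode=black.Mode()))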
@@ -1,4 +1,3 @@
-import requests
 import time
 import base64
 from Crypto.Cipher import AES
@@ -9,6 +8,7 @@ import validators
 import msgpack
 import re
 from collections import Counter
+from typing import List
 from loguru import logger
 from packaging import version as pyver
 from distutils.version import LooseVersion
@@ -117,14 +117,6 @@ class Agent(BaseAuditModel):
             return settings.DL_32
         return None

-    @property
-    def winsalt_dl(self):
-        if self.arch == "64":
-            return settings.SALT_64
-        elif self.arch == "32":
-            return settings.SALT_32
-        return None
-
     @property
     def win_inno_exe(self):
         if self.arch == "64":
@@ -382,14 +374,15 @@ class Agent(BaseAuditModel):

         return patch_policy

-    # clear is used to delete managed policy checks from agent
-    # parent_checks specifies a list of checks to delete from agent with matching parent_check field
-    def generate_checks_from_policies(self, clear=False):
-        from automation.models import Policy
-
-        # Clear agent checks managed by policy
-        if clear:
-            self.agentchecks.filter(managed_by_policy=True).delete()
+    def get_approved_update_guids(self) -> List[str]:
+        return list(
+            self.winupdates.filter(action="approve", installed=False).values_list(
+                "guid", flat=True
+            )
+        )
+
+    def generate_checks_from_policies(self):
+        from automation.models import Policy

         # Clear agent checks that have overriden_by_policy set
         self.agentchecks.update(overriden_by_policy=False)
@@ -397,17 +390,9 @@ class Agent(BaseAuditModel):
         # Generate checks based on policies
         Policy.generate_policy_checks(self)

-    # clear is used to delete managed policy tasks from agent
-    # parent_tasks specifies a list of tasks to delete from agent with matching parent_task field
-    def generate_tasks_from_policies(self, clear=False):
-        from autotasks.tasks import delete_win_task_schedule
+    def generate_tasks_from_policies(self):
         from automation.models import Policy

-        # Clear agent tasks managed by policy
-        if clear:
-            for task in self.autotasks.filter(managed_by_policy=True):
-                delete_win_task_schedule.delay(task.pk)
-
         # Generate tasks based on policies
         Policy.generate_policy_tasks(self)
@@ -466,77 +451,6 @@ class Agent(BaseAuditModel):
         await nc.flush()
         await nc.close()

-    def salt_api_cmd(self, **kwargs):
-
-        # salt should always timeout first before the requests' timeout
-        try:
-            timeout = kwargs["timeout"]
-        except KeyError:
-            # default timeout
-            timeout = 15
-            salt_timeout = 12
-        else:
-            if timeout < 8:
-                timeout = 8
-                salt_timeout = 5
-            else:
-                salt_timeout = timeout - 3
-
-        json = {
-            "client": "local",
-            "tgt": self.salt_id,
-            "fun": kwargs["func"],
-            "timeout": salt_timeout,
-            "username": settings.SALT_USERNAME,
-            "password": settings.SALT_PASSWORD,
-            "eauth": "pam",
-        }
-
-        if "arg" in kwargs:
-            json.update({"arg": kwargs["arg"]})
-        if "kwargs" in kwargs:
-            json.update({"kwarg": kwargs["kwargs"]})
-
-        try:
-            resp = requests.post(
-                f"http://{settings.SALT_HOST}:8123/run",
-                json=[json],
-                timeout=timeout,
-            )
-        except Exception:
-            return "timeout"
-
-        try:
-            ret = resp.json()["return"][0][self.salt_id]
-        except Exception as e:
-            logger.error(f"{self.salt_id}: {e}")
-            return "error"
-        else:
-            return ret
-
-    def salt_api_async(self, **kwargs):
-
-        json = {
-            "client": "local_async",
-            "tgt": self.salt_id,
-            "fun": kwargs["func"],
-            "username": settings.SALT_USERNAME,
-            "password": settings.SALT_PASSWORD,
-            "eauth": "pam",
-        }
-
-        if "arg" in kwargs:
-            json.update({"arg": kwargs["arg"]})
-        if "kwargs" in kwargs:
-            json.update({"kwarg": kwargs["kwargs"]})
-
-        try:
-            resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
-        except Exception:
-            return "timeout"
-
-        return resp
-
     @staticmethod
     def serialize(agent):
         # serializes the agent and returns json
@@ -547,32 +461,6 @@ class Agent(BaseAuditModel):
         del ret["client"]
         return ret

-    @staticmethod
-    def salt_batch_async(**kwargs):
-        assert isinstance(kwargs["minions"], list)
-
-        json = {
-            "client": "local_async",
-            "tgt_type": "list",
-            "tgt": kwargs["minions"],
-            "fun": kwargs["func"],
-            "username": settings.SALT_USERNAME,
-            "password": settings.SALT_PASSWORD,
-            "eauth": "pam",
-        }
-
-        if "arg" in kwargs:
-            json.update({"arg": kwargs["arg"]})
-        if "kwargs" in kwargs:
-            json.update({"kwarg": kwargs["kwargs"]})
-
-        try:
-            resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
-        except Exception:
-            return "timeout"
-
-        return resp
-
     def delete_superseded_updates(self):
         try:
             pks = []  # list of pks to delete
@@ -625,6 +513,13 @@ class Agent(BaseAuditModel):
             elif action.details["action"] == "taskdelete":
                 delete_win_task_schedule.delay(task_id, pending_action=action.id)

+    # for clearing duplicate pending actions on agent
+    def remove_matching_pending_task_actions(self, task_id):
+        # remove any other pending actions on agent with same task_id
+        for action in self.pendingactions.exclude(status="completed"):
+            if action.details["task_id"] == task_id:
+                action.delete()
+

 class AgentOutage(models.Model):
     agent = models.ForeignKey(
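Note: the new `get_approved_update_guids` helper leans on `values_list("guid", flat=True)`, which yields plain strings rather than one-tuples, so the method returns a list usable directly as a payload. The shape difference, assuming an `agent` instance like the one above (illustrative only, needs a live Django setup to run):

    qs = agent.winupdates.filter(action="approve", installed=False)
    list(qs.values_list("guid"))             # [("guid-1",), ("guid-2",)]
    list(qs.values_list("guid", flat=True))  # ["guid-1", "guid-2"]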
@@ -42,11 +42,13 @@ class AgentTableSerializer(serializers.ModelSerializer):
     last_seen = serializers.SerializerMethodField()
     client_name = serializers.ReadOnlyField(source="client.name")
     site_name = serializers.ReadOnlyField(source="site.name")
+    logged_username = serializers.SerializerMethodField()
+    italic = serializers.SerializerMethodField()

     def get_pending_actions(self, obj):
         return obj.pendingactions.filter(status="pending").count()

-    def get_last_seen(self, obj):
+    def get_last_seen(self, obj) -> str:
         if obj.time_zone is not None:
             agent_tz = pytz.timezone(obj.time_zone)
         else:
@@ -54,6 +56,17 @@ class AgentTableSerializer(serializers.ModelSerializer):

         return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M:%S")

+    def get_logged_username(self, obj) -> str:
+        if obj.logged_in_username == "None" and obj.status == "online":
+            return obj.last_logged_in_user
+        elif obj.logged_in_username != "None":
+            return obj.logged_in_username
+        else:
+            return "-"
+
+    def get_italic(self, obj) -> bool:
+        return obj.logged_in_username == "None" and obj.status == "online"
+
     class Meta:
         model = Agent
         fields = [
@@ -73,9 +86,9 @@ class AgentTableSerializer(serializers.ModelSerializer):
             "last_seen",
             "boot_time",
             "checks",
-            "logged_in_username",
-            "last_logged_in_user",
             "maintenance_mode",
+            "logged_username",
+            "italic",
         ]
         depth = 2
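Note: the two SerializerMethodFields added above move a display decision out of the frontend: while an agent is online but currently reports "None" as the logged-in user, the table shows the last known user instead, and `italic` tells the UI to render that stale value in italics. The same branch logic as a small runnable sketch:

    def logged_username(logged_in: str, last_user: str, status: str) -> str:
        # mirrors get_logged_username above; names are local stand-ins
        if logged_in == "None" and status == "online":
            return last_user        # stale value; get_italic flags it for the UI
        elif logged_in != "None":
            return logged_in
        else:
            return "-"

    assert logged_username("None", "jane", "online") == "jane"
    assert logged_username("bob", "jane", "online") == "bob"
    assert logged_username("None", "jane", "overdue") == "-"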
@@ -2,11 +2,11 @@ import asyncio
 from loguru import logger
 from time import sleep
 import random
-import requests
 from packaging import version as pyver
 from typing import List

 from django.conf import settings
+from scripts.models import Script

 from tacticalrmm.celery import app
 from agents.models import Agent, AgentOutage
@@ -16,6 +16,45 @@ from logs.models import PendingAction
 logger.configure(**settings.LOG_CONFIG)


+def _check_agent_service(pk: int) -> None:
+    agent = Agent.objects.get(pk=pk)
+    r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=2))
+    if r == "pong":
+        logger.info(
+            f"Detected crashed tacticalagent service on {agent.hostname}, attempting recovery"
+        )
+        data = {"func": "recover", "payload": {"mode": "tacagent"}}
+        asyncio.run(agent.nats_cmd(data, wait=False))
+
+
+def _check_in_full(pk: int) -> None:
+    agent = Agent.objects.get(pk=pk)
+    asyncio.run(agent.nats_cmd({"func": "checkinfull"}, wait=False))
+
+
+@app.task
+def check_in_task() -> None:
+    q = Agent.objects.only("pk", "version")
+    agents: List[int] = [
+        i.pk for i in q if pyver.parse(i.version) == pyver.parse("1.1.12")
+    ]
+    chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
+    for chunk in chunks:
+        for pk in chunk:
+            _check_in_full(pk)
+            sleep(0.1)
+        rand = random.randint(3, 7)
+        sleep(rand)
+
+
+@app.task
+def monitor_agents_task() -> None:
+    q = Agent.objects.all()
+    agents: List[int] = [i.pk for i in q if i.has_nats and i.status != "online"]
+    for agent in agents:
+        _check_agent_service(agent)
+
+
 def agent_update(pk: int) -> str:
     agent = Agent.objects.get(pk=pk)
     # skip if we can't determine the arch
@@ -23,23 +62,12 @@ def agent_update(pk: int) -> str:
         logger.warning(f"Unable to determine arch on {agent.hostname}. Skipping.")
         return "noarch"

-    # force an update to 1.1.5 since 1.1.6 needs agent to be on 1.1.5 first
-    if pyver.parse(agent.version) < pyver.parse("1.1.5"):
-        version = "1.1.5"
-        if agent.arch == "64":
-            url = "https://github.com/wh1te909/rmmagent/releases/download/v1.1.5/winagent-v1.1.5.exe"
-            inno = "winagent-v1.1.5.exe"
-        elif agent.arch == "32":
-            url = "https://github.com/wh1te909/rmmagent/releases/download/v1.1.5/winagent-v1.1.5-x86.exe"
-            inno = "winagent-v1.1.5-x86.exe"
-        else:
-            return "nover"
-    else:
-        version = settings.LATEST_AGENT_VER
-        url = agent.winagent_dl
-        inno = agent.win_inno_exe
+    version = settings.LATEST_AGENT_VER
+    url = agent.winagent_dl
+    inno = agent.win_inno_exe

     if agent.has_nats:
+        if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
             if agent.pendingactions.filter(
                 action_type="agentupdate", status="pending"
             ).exists():
@@ -60,18 +88,20 @@ def agent_update(pk: int) -> str:
                     "inno": inno,
                 },
             )
-        return "created"
-    # TODO
-    # Salt is deprecated, remove this once salt is gone
-    else:
-        agent.salt_api_async(
-            func="win_agent.do_agent_update_v2",
-            kwargs={
-                "inno": inno,
-                "url": url,
-            },
-        )
-        return "salt"
+        else:
+            nats_data = {
+                "func": "agentupdate",
+                "payload": {
+                    "url": url,
+                    "version": version,
+                    "inno": inno,
+                },
+            }
+            asyncio.run(agent.nats_cmd(nats_data, wait=False))
+
+        return "created"
+
+    return "not supported"


 @app.task
@@ -80,16 +110,18 @@ def send_agent_update_task(pks: List[int], version: str) -> None:
     agents: List[int] = [
         i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)
     ]
-    for pk in agents:
-        agent_update(pk)
+    chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
+    for chunk in chunks:
+        for pk in chunk:
+            agent_update(pk)
+            sleep(0.05)
+        sleep(4)


 @app.task
 def auto_self_agent_update_task() -> None:
     core = CoreSettings.objects.first()
     if not core.agent_auto_update:
-        logger.info("Agent auto update is disabled. Skipping.")
         return

     q = Agent.objects.only("pk", "version")
@@ -99,8 +131,29 @@ def auto_self_agent_update_task() -> None:
         if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
     ]

-    for pk in pks:
-        agent_update(pk)
+    chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
+    for chunk in chunks:
+        for pk in chunk:
+            agent_update(pk)
+            sleep(0.05)
+        sleep(4)
+
+
+@app.task
+def get_wmi_task():
+    agents = Agent.objects.all()
+    online = [
+        i
+        for i in agents
+        if pyver.parse(i.version) >= pyver.parse("1.2.0") and i.status == "online"
+    ]
+    chunks = (online[i : i + 50] for i in range(0, len(online), 50))
+    for chunk in chunks:
+        for agent in chunk:
+            asyncio.run(agent.nats_cmd({"func": "wmi"}, wait=False))
+            sleep(0.1)
+        rand = random.randint(3, 7)
+        sleep(rand)


 @app.task
@@ -109,98 +162,18 @@ def sync_sysinfo_task():
     online = [
         i
         for i in agents
-        if pyver.parse(i.version) >= pyver.parse("1.1.3") and i.status == "online"
+        if pyver.parse(i.version) >= pyver.parse("1.1.3")
+        and pyver.parse(i.version) <= pyver.parse("1.1.12")
+        and i.status == "online"
     ]
-    for agent in online:
-        asyncio.run(agent.nats_cmd({"func": "sync"}, wait=False))
-
-
-@app.task
-def sync_salt_modules_task(pk):
-    agent = Agent.objects.get(pk=pk)
-    r = agent.salt_api_cmd(timeout=35, func="saltutil.sync_modules")
-    # successful sync if new/charnged files: {'return': [{'MINION-15': ['modules.get_eventlog', 'modules.win_agent', 'etc...']}]}
-    # successful sync with no new/changed files: {'return': [{'MINION-15': []}]}
-    if r == "timeout" or r == "error":
-        return f"Unable to sync modules {agent.salt_id}"
-
-    return f"Successfully synced salt modules on {agent.hostname}"
-
-
-@app.task
-def batch_sync_modules_task():
-    # sync modules, split into chunks of 50 agents to not overload salt
-    agents = Agent.objects.all()
-    online = [i.salt_id for i in agents]
     chunks = (online[i : i + 50] for i in range(0, len(online), 50))
     for chunk in chunks:
-        Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
-        sleep(10)
-
-
-@app.task
-def uninstall_agent_task(salt_id, has_nats):
-    attempts = 0
-    error = False
-
-    if not has_nats:
-        while 1:
-            try:
-
-                r = requests.post(
-                    f"http://{settings.SALT_HOST}:8123/run",
-                    json=[
-                        {
-                            "client": "local",
-                            "tgt": salt_id,
-                            "fun": "win_agent.uninstall_agent",
-                            "timeout": 8,
-                            "username": settings.SALT_USERNAME,
-                            "password": settings.SALT_PASSWORD,
-                            "eauth": "pam",
-                        }
-                    ],
-                    timeout=10,
-                )
-                ret = r.json()["return"][0][salt_id]
-            except Exception:
-                attempts += 1
-            else:
-                if ret != "ok":
-                    attempts += 1
-                else:
-                    attempts = 0
-
-            if attempts >= 10:
-                error = True
-                break
-            elif attempts == 0:
-                break
-
-    if error:
-        logger.error(f"{salt_id} uninstall failed")
-    else:
-        logger.info(f"{salt_id} was successfully uninstalled")
-
-    try:
-        r = requests.post(
-            f"http://{settings.SALT_HOST}:8123/run",
-            json=[
-                {
-                    "client": "wheel",
-                    "fun": "key.delete",
-                    "match": salt_id,
-                    "username": settings.SALT_USERNAME,
-                    "password": settings.SALT_PASSWORD,
-                    "eauth": "pam",
-                }
-            ],
-            timeout=30,
-        )
-    except Exception:
-        logger.error(f"{salt_id} unable to remove salt-key")
-
-    return "ok"
+        for agent in chunk:
+            asyncio.run(agent.nats_cmd({"func": "sync"}, wait=False))
+            sleep(0.1)
+        rand = random.randint(3, 7)
+        sleep(rand)


 @app.task
@@ -255,8 +228,90 @@ def agent_outages_task():
             outage = AgentOutage(agent=agent)
             outage.save()

+            # add a null check history to allow gaps in graph
+            for check in agent.agentchecks.all():
+                check.add_check_history(None)
+
             if agent.overdue_email_alert and not agent.maintenance_mode:
                 agent_outage_email_task.delay(pk=outage.pk)

             if agent.overdue_text_alert and not agent.maintenance_mode:
                 agent_outage_sms_task.delay(pk=outage.pk)
+
+
+@app.task
+def handle_agent_recovery_task(pk: int) -> None:
+    sleep(10)
+    from agents.models import RecoveryAction
+
+    action = RecoveryAction.objects.get(pk=pk)
+    if action.mode == "command":
+        data = {"func": "recoverycmd", "recoverycommand": action.command}
+    else:
+        data = {"func": "recover", "payload": {"mode": action.mode}}
+
+    asyncio.run(action.agent.nats_cmd(data, wait=False))
+
+
+@app.task
+def run_script_email_results_task(
+    agentpk: int, scriptpk: int, nats_timeout: int, nats_data: dict, emails: List[str]
+):
+    agent = Agent.objects.get(pk=agentpk)
+    script = Script.objects.get(pk=scriptpk)
+    nats_data["func"] = "runscriptfull"
+    r = asyncio.run(agent.nats_cmd(nats_data, timeout=nats_timeout))
+    if r == "timeout":
+        logger.error(f"{agent.hostname} timed out running script.")
+        return
+
+    CORE = CoreSettings.objects.first()
+    subject = f"{agent.hostname} {script.name} Results"
+    exec_time = "{:.4f}".format(r["execution_time"])
+    body = (
+        subject
+        + f"\nReturn code: {r['retcode']}\nExecution time: {exec_time} seconds\nStdout: {r['stdout']}\nStderr: {r['stderr']}"
+    )
+
+    import smtplib
+    from email.message import EmailMessage
+
+    msg = EmailMessage()
+    msg["Subject"] = subject
+    msg["From"] = CORE.smtp_from_email
+
+    if emails:
+        msg["To"] = ", ".join(emails)
+    else:
+        msg["To"] = ", ".join(CORE.email_alert_recipients)
+
+    msg.set_content(body)
+
+    try:
+        with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
+            if CORE.smtp_requires_auth:
+                server.ehlo()
+                server.starttls()
+                server.login(CORE.smtp_host_user, CORE.smtp_host_password)
+                server.send_message(msg)
+                server.quit()
+            else:
+                server.send_message(msg)
+                server.quit()
+    except Exception as e:
+        logger.error(e)
+
+
+@app.task
+def remove_salt_task() -> None:
+    if hasattr(settings, "KEEP_SALT") and settings.KEEP_SALT:
+        return
+
+    q = Agent.objects.all()
+    agents = [i for i in q if pyver.parse(i.version) >= pyver.parse("1.3.0")]
+    chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
+    for chunk in chunks:
+        for agent in chunk:
+            asyncio.run(agent.nats_cmd({"func": "removesalt"}, wait=False))
+            sleep(0.1)
+        sleep(4)
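Note: the new and reworked tasks above (check_in_task, get_wmi_task, both update tasks, sync_sysinfo_task, remove_salt_task) share one throttling idiom: slice the agent list into fixed-size chunks, pause briefly between agents, and pause longer (sometimes with jitter) between chunks, so a fleet-wide NATS fan-out is spread over time instead of firing all at once. The pattern in isolation, as a runnable sketch with shrunken sizes (the tasks use chunks of 30 or 50 over real querysets):

    import random
    from time import sleep

    agents = list(range(12))  # stand-in for a list of agent pks

    chunks = (agents[i : i + 5] for i in range(0, len(agents), 5))
    for chunk in chunks:
        for pk in chunk:
            # per-agent NATS command would go here, e.g. _check_in_full(pk)
            sleep(0.1)  # small gap between agents
        sleep(random.randint(3, 7))  # larger, jittered gap between chunks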
@@ -14,12 +14,6 @@ from tacticalrmm.test import TacticalTestCase
 from .serializers import AgentSerializer
 from winupdate.serializers import WinUpdatePolicySerializer
 from .models import Agent
-from .tasks import (
-    agent_recovery_sms_task,
-    auto_self_agent_update_task,
-    sync_salt_modules_task,
-    batch_sync_modules_task,
-)
 from winupdate.models import WinUpdatePolicy


@@ -110,9 +104,8 @@ class TestAgentViews(TacticalTestCase):
         self.check_not_authenticated("get", url)

     @patch("agents.models.Agent.nats_cmd")
-    @patch("agents.tasks.uninstall_agent_task.delay")
     @patch("agents.views.reload_nats")
-    def test_uninstall(self, reload_nats, mock_task, nats_cmd):
+    def test_uninstall(self, reload_nats, nats_cmd):
         url = "/agents/uninstall/"
         data = {"pk": self.agent.pk}

@@ -121,13 +114,18 @@ class TestAgentViews(TacticalTestCase):

         nats_cmd.assert_called_with({"func": "uninstall"}, wait=False)
         reload_nats.assert_called_once()
-        mock_task.assert_called_with(self.agent.salt_id, True)

         self.check_not_authenticated("delete", url)

     @patch("agents.models.Agent.nats_cmd")
     def test_get_processes(self, mock_ret):
-        url = f"/agents/{self.agent.pk}/getprocs/"
+        agent_old = baker.make_recipe("agents.online_agent", version="1.1.12")
+        url_old = f"/agents/{agent_old.pk}/getprocs/"
+        r = self.client.get(url_old)
+        self.assertEqual(r.status_code, 400)
+
+        agent = baker.make_recipe("agents.online_agent", version="1.2.0")
+        url = f"/agents/{agent.pk}/getprocs/"

         with open(
             os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/procs.json")
@@ -137,9 +135,7 @@ class TestAgentViews(TacticalTestCase):
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
         assert any(i["name"] == "Registry" for i in mock_ret.return_value)
-        assert any(
-            i["memory_percent"] == 0.004843281375620747 for i in mock_ret.return_value
-        )
+        assert any(i["membytes"] == 434655234324 for i in mock_ret.return_value)

         mock_ret.return_value = "timeout"
         r = self.client.get(url)
@@ -331,7 +327,7 @@ class TestAgentViews(TacticalTestCase):
         r = self.client.post(url, data, format="json")
         self.assertEqual(r.status_code, 200)

-        data["mode"] = "salt"
+        data["mode"] = "mesh"
         r = self.client.post(url, data, format="json")
         self.assertEqual(r.status_code, 400)
         self.assertIn("pending", r.json())
@@ -351,7 +347,7 @@ class TestAgentViews(TacticalTestCase):

         self.agent.version = "0.9.4"
         self.agent.save(update_fields=["version"])
-        data["mode"] = "salt"
+        data["mode"] = "mesh"
         r = self.client.post(url, data, format="json")
         self.assertEqual(r.status_code, 400)
         self.assertIn("0.9.5", r.json())
@@ -539,7 +535,7 @@ class TestAgentViews(TacticalTestCase):

         self.check_not_authenticated("get", url)

-    @patch("winupdate.tasks.bulk_check_for_updates_task.delay")
+    """ @patch("winupdate.tasks.bulk_check_for_updates_task.delay")
     @patch("scripts.tasks.handle_bulk_script_task.delay")
     @patch("scripts.tasks.handle_bulk_command_task.delay")
     @patch("agents.models.Agent.salt_batch_async")
@@ -550,6 +546,7 @@ class TestAgentViews(TacticalTestCase):

         payload = {
             "mode": "command",
+            "monType": "all",
             "target": "agents",
             "client": None,
             "site": None,
@@ -567,6 +564,7 @@ class TestAgentViews(TacticalTestCase):

         payload = {
             "mode": "command",
+            "monType": "servers",
             "target": "agents",
             "client": None,
             "site": None,
@@ -581,12 +579,11 @@ class TestAgentViews(TacticalTestCase):

         payload = {
             "mode": "command",
+            "monType": "workstations",
             "target": "client",
             "client": self.agent.client.id,
             "site": None,
-            "agentPKs": [
-                self.agent.pk,
-            ],
+            "agentPKs": [],
             "cmd": "gpupdate /force",
             "timeout": 300,
             "shell": "cmd",
@@ -598,6 +595,7 @@ class TestAgentViews(TacticalTestCase):

         payload = {
             "mode": "command",
+            "monType": "all",
             "target": "client",
             "client": self.agent.client.id,
             "site": self.agent.site.id,
@@ -615,6 +613,7 @@ class TestAgentViews(TacticalTestCase):

         payload = {
             "mode": "scan",
+            "monType": "all",
             "target": "agents",
             "client": None,
             "site": None,
@@ -628,6 +627,7 @@ class TestAgentViews(TacticalTestCase):

         payload = {
             "mode": "install",
+            "monType": "all",
             "target": "client",
             "client": self.agent.client.id,
             "site": None,
@@ -649,7 +649,7 @@ class TestAgentViews(TacticalTestCase):

         # TODO mock the script

-        self.check_not_authenticated("post", url)
+        self.check_not_authenticated("post", url) """

     @patch("agents.models.Agent.nats_cmd")
     def test_recover_mesh(self, nats_cmd):
@@ -751,49 +751,14 @@ class TestAgentTasks(TacticalTestCase):
         self.authenticate()
         self.setup_coresettings()

-    @patch("agents.models.Agent.salt_api_cmd")
-    def test_sync_salt_modules_task(self, salt_api_cmd):
-        self.agent = baker.make_recipe("agents.agent")
-        salt_api_cmd.return_value = {"return": [{f"{self.agent.salt_id}": []}]}
-        ret = sync_salt_modules_task.s(self.agent.pk).apply()
-        salt_api_cmd.assert_called_with(timeout=35, func="saltutil.sync_modules")
-        self.assertEqual(
-            ret.result, f"Successfully synced salt modules on {self.agent.hostname}"
-        )
-        self.assertEqual(ret.status, "SUCCESS")
-
-        salt_api_cmd.return_value = "timeout"
-        ret = sync_salt_modules_task.s(self.agent.pk).apply()
-        self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")
-
-        salt_api_cmd.return_value = "error"
-        ret = sync_salt_modules_task.s(self.agent.pk).apply()
-        self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")
-
-    @patch("agents.models.Agent.salt_batch_async", return_value=None)
-    @patch("agents.tasks.sleep", return_value=None)
-    def test_batch_sync_modules_task(self, mock_sleep, salt_batch_async):
-        # chunks of 50, should run 4 times
-        baker.make_recipe(
-            "agents.online_agent", last_seen=djangotime.now(), _quantity=60
-        )
-        baker.make_recipe(
-            "agents.overdue_agent",
-            last_seen=djangotime.now() - djangotime.timedelta(minutes=9),
-            _quantity=115,
-        )
-        ret = batch_sync_modules_task.s().apply()
-        self.assertEqual(salt_batch_async.call_count, 4)
-        self.assertEqual(ret.status, "SUCCESS")
-
-    @patch("agents.models.Agent.salt_api_async")
-    def test_agent_update(self, salt_api_async):
+    @patch("agents.models.Agent.nats_cmd")
+    def test_agent_update(self, nats_cmd):
         from agents.tasks import agent_update

         agent_noarch = baker.make_recipe(
             "agents.agent",
             operating_system="Error getting OS",
-            version="1.1.0",
+            version="1.1.11",
         )
         r = agent_update(agent_noarch.pk)
         self.assertEqual(r, "noarch")
@@ -804,15 +769,15 @@ class TestAgentTasks(TacticalTestCase):
             0,
         )

-        agent64_nats = baker.make_recipe(
+        agent64_111 = baker.make_recipe(
             "agents.agent",
             operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
-            version="1.1.0",
+            version="1.1.11",
         )

-        r = agent_update(agent64_nats.pk)
+        r = agent_update(agent64_111.pk)
         self.assertEqual(r, "created")
-        action = PendingAction.objects.get(agent__pk=agent64_nats.pk)
+        action = PendingAction.objects.get(agent__pk=agent64_111.pk)
         self.assertEqual(action.action_type, "agentupdate")
         self.assertEqual(action.status, "pending")
         self.assertEqual(action.details["url"], settings.DL_64)
@@ -821,33 +786,24 @@ class TestAgentTasks(TacticalTestCase):
         )
         self.assertEqual(action.details["version"], settings.LATEST_AGENT_VER)

-        agent64_salt = baker.make_recipe(
+        agent64 = baker.make_recipe(
             "agents.agent",
             operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
-            version="1.0.0",
+            version="1.1.12",
         )
-        salt_api_async.return_value = True
-        r = agent_update(agent64_salt.pk)
-        self.assertEqual(r, "salt")
-        salt_api_async.assert_called_with(
-            func="win_agent.do_agent_update_v2",
-            kwargs={
-                "inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
-                "url": settings.DL_64,
-            },
-        )
-        salt_api_async.reset_mock()
-
-        agent32_nats = baker.make_recipe(
-            "agents.agent",
-            operating_system="Windows 7 Professional, 32 bit (build 7601.23964)",
-            version="1.1.0",
-        )
-
-        agent32_salt = baker.make_recipe(
-            "agents.agent",
-            operating_system="Windows 7 Professional, 32 bit (build 7601.23964)",
-            version="1.0.0",
-        )
+        nats_cmd.return_value = "ok"
+        r = agent_update(agent64.pk)
+        self.assertEqual(r, "created")
+        nats_cmd.assert_called_with(
+            {
+                "func": "agentupdate",
+                "payload": {
+                    "url": settings.DL_64,
+                    "version": settings.LATEST_AGENT_VER,
+                    "inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
+                },
+            },
+            wait=False,
+        )

     """ @patch("agents.models.Agent.salt_api_async")
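Note: with the salt helpers gone, the reworked tests consistently stub the transport with `@patch("agents.models.Agent.nats_cmd")` and then assert on the exact payload. The bare pattern, runnable with the standard library alone (the `Agent` class here is a stand-in, not the real model):

    from unittest import TestCase, main
    from unittest.mock import MagicMock

    class Agent:  # stand-in for agents.models.Agent
        def nats_cmd(self, data, wait=True, timeout=10):
            raise RuntimeError("would hit NATS for real")

    class TestPattern(TestCase):
        def test_mocked_nats(self):
            agent = Agent()
            agent.nats_cmd = MagicMock(return_value="ok")
            r = agent.nats_cmd({"func": "agentupdate"}, wait=False)
            self.assertEqual(r, "ok")
            agent.nats_cmd.assert_called_with({"func": "agentupdate"}, wait=False)

    if __name__ == "__main__":
        main()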
@@ -7,6 +7,7 @@ import random
 import string
 import datetime as dt
 from packaging import version as pyver
+from typing import List

 from django.conf import settings
 from django.shortcuts import get_object_or_404
@@ -32,8 +33,11 @@ from .serializers import (
 )
 from winupdate.serializers import WinUpdatePolicySerializer

-from .tasks import uninstall_agent_task, send_agent_update_task
-from winupdate.tasks import bulk_check_for_updates_task
+from .tasks import (
+    send_agent_update_task,
+    run_script_email_results_task,
+)
+from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
 from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task

 from tacticalrmm.utils import notify_error, reload_nats
@@ -68,10 +72,6 @@ def ping(request, pk):
     r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
     if r == "pong":
         status = "online"
-    else:
-        r = agent.salt_api_cmd(timeout=5, func="test.ping")
-        if isinstance(r, bool) and r:
-            status = "online"

     return Response({"name": agent.hostname, "status": status})

@@ -82,13 +82,9 @@ def uninstall(request):
     if agent.has_nats:
         asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))

-    salt_id = agent.salt_id
     name = agent.hostname
-    has_nats = agent.has_nats
     agent.delete()
     reload_nats()

-    uninstall_agent_task.delay(salt_id, has_nats)
     return Response(f"{name} will now be uninstalled.")


@@ -110,8 +106,8 @@ def edit_agent(request):

     # check if site changed and initiate generating correct policies
     if old_site != request.data["site"]:
-        agent.generate_checks_from_policies(clear=True)
-        agent.generate_tasks_from_policies(clear=True)
+        agent.generate_checks_from_policies()
+        agent.generate_tasks_from_policies()

     return Response("ok")

@@ -155,12 +151,12 @@ def agent_detail(request, pk):
 @api_view()
 def get_processes(request, pk):
     agent = get_object_or_404(Agent, pk=pk)
-    if not agent.has_nats:
-        return notify_error("Requires agent version 1.1.0 or greater")
+    if pyver.parse(agent.version) < pyver.parse("1.2.0"):
+        return notify_error("Requires agent version 1.2.0 or greater")

     r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
     if r == "timeout":
         return notify_error("Unable to contact the agent")

     return Response(r)

@@ -607,8 +603,6 @@ def install_agent(request):
     resp = {
         "cmd": " ".join(str(i) for i in cmd),
         "url": download_url,
-        "salt64": settings.SALT_64,
-        "salt32": settings.SALT_32,
     }

     return Response(resp)
@@ -669,17 +663,12 @@ def recover(request):
         return notify_error("Only available in agent version greater than 0.9.5")

     if not agent.has_nats:
-        if mode == "tacagent" or mode == "checkrunner" or mode == "rpc":
+        if mode == "tacagent" or mode == "rpc":
             return notify_error("Requires agent version 1.1.0 or greater")

     # attempt a realtime recovery if supported, otherwise fall back to old recovery method
     if agent.has_nats:
-        if (
-            mode == "tacagent"
-            or mode == "checkrunner"
-            or mode == "salt"
-            or mode == "mesh"
-        ):
+        if mode == "tacagent" or mode == "mesh":
             data = {"func": "recover", "payload": {"mode": mode}}
             r = asyncio.run(agent.nats_cmd(data, timeout=10))
             if r == "ok":
@@ -738,6 +727,21 @@ def run_script(request):
     if output == "wait":
         r = asyncio.run(agent.nats_cmd(data, timeout=req_timeout))
         return Response(r)
+    elif output == "email":
+        if not pyver.parse(agent.version) >= pyver.parse("1.1.12"):
+            return notify_error("Requires agent version 1.1.12 or greater")
+
+        emails = (
+            [] if request.data["emailmode"] == "default" else request.data["emails"]
+        )
+        run_script_email_results_task.delay(
+            agentpk=agent.pk,
+            scriptpk=script.pk,
+            nats_timeout=req_timeout,
+            nats_data=data,
+            emails=emails,
+        )
+        return Response(f"{script.name} will now be run on {agent.hostname}")
     else:
         asyncio.run(agent.nats_cmd(data, wait=False))
         return Response(f"{script.name} will now be run on {agent.hostname}")
@@ -825,8 +829,12 @@ def bulk(request):
     else:
         return notify_error("Something went wrong")

-    minions = [agent.salt_id for agent in q]
-    agents = [agent.pk for agent in q]
+    if request.data["monType"] == "servers":
+        q = q.filter(monitoring_type="server")
+    elif request.data["monType"] == "workstations":
+        q = q.filter(monitoring_type="workstation")
+
+    agents: List[int] = [agent.pk for agent in q]

     AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)

@@ -844,14 +852,12 @@ def bulk(request):
         return Response(f"{script.name} will now be run on {len(agents)} agents")

     elif request.data["mode"] == "install":
-        r = Agent.salt_batch_async(minions=minions, func="win_agent.install_updates")
-        if r == "timeout":
-            return notify_error("Salt API not running")
+        bulk_install_updates_task.delay(agents)
         return Response(
             f"Pending updates will now be installed on {len(agents)} agents"
         )
     elif request.data["mode"] == "scan":
-        bulk_check_for_updates_task.delay(minions=minions)
+        bulk_check_for_updates_task.delay(agents)
         return Response(f"Patch status scan will now run on {len(agents)} agents")

     return notify_error("Something went wrong")
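Note: run_script now supports three output modes: "wait" (synchronous NATS round trip), the new "email" (results delivered by the Celery task added in the tasks hunks above), and fire-and-forget. A hypothetical client-side payload for the email path; only the "output", "emailmode" and "emails" fields are taken from the view code above, while the other field names and the URL are assumptions for illustration:

    import requests  # client-side sketch, not part of this diff

    payload = {
        "output": "email",
        "emailmode": "custom",          # "default" -> global alert recipients
        "emails": ["ops@example.com"],  # used only when emailmode != "default"
        # agent/script selectors and timeout omitted; their names are not shown here
    }
    r = requests.post("https://rmm.example.com/agents/runscript/", json=payload)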
@@ -7,19 +7,25 @@ import django.db.models.deletion
 class Migration(migrations.Migration):

     dependencies = [
-        ('checks', '0010_auto_20200922_1344'),
-        ('alerts', '0002_auto_20200815_1618'),
+        ("checks", "0010_auto_20200922_1344"),
+        ("alerts", "0002_auto_20200815_1618"),
     ]

     operations = [
         migrations.AddField(
-            model_name='alert',
-            name='assigned_check',
-            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='checks.check'),
+            model_name="alert",
+            name="assigned_check",
+            field=models.ForeignKey(
+                blank=True,
+                null=True,
+                on_delete=django.db.models.deletion.CASCADE,
+                related_name="alert",
+                to="checks.check",
+            ),
         ),
         migrations.AlterField(
-            model_name='alert',
-            name='alert_time',
+            model_name="alert",
+            name="alert_time",
             field=models.DateTimeField(auto_now_add=True, null=True),
         ),
     ]
@@ -1,5 +0,0 @@
-from django.apps import AppConfig
-
-
-class Apiv2Config(AppConfig):
-    name = 'apiv2'
@@ -1,38 +0,0 @@
-from tacticalrmm.test import TacticalTestCase
-from unittest.mock import patch
-from model_bakery import baker
-from itertools import cycle
-
-
-class TestAPIv2(TacticalTestCase):
-    def setUp(self):
-        self.authenticate()
-        self.setup_coresettings()
-
-    @patch("agents.models.Agent.salt_api_cmd")
-    def test_sync_modules(self, mock_ret):
-        # setup data
-        agent = baker.make_recipe("agents.agent")
-        url = "/api/v2/saltminion/"
-        payload = {"agent_id": agent.agent_id}
-
-        mock_ret.return_value = "error"
-        r = self.client.patch(url, payload, format="json")
-        self.assertEqual(r.status_code, 400)
-
-        mock_ret.return_value = []
-        r = self.client.patch(url, payload, format="json")
-        self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data, "Modules are already in sync")
-
-        mock_ret.return_value = ["modules.win_agent"]
-        r = self.client.patch(url, payload, format="json")
-        self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data, "Successfully synced salt modules")
-
-        mock_ret.return_value = ["askdjaskdjasd", "modules.win_agent"]
-        r = self.client.patch(url, payload, format="json")
-        self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data, "Successfully synced salt modules")
-
-        self.check_not_authenticated("patch", url)
@@ -1,14 +0,0 @@
-from django.urls import path
-from . import views
-from apiv3 import views as v3_views
-
-urlpatterns = [
-    path("newagent/", v3_views.NewAgent.as_view()),
-    path("meshexe/", v3_views.MeshExe.as_view()),
-    path("saltminion/", v3_views.SaltMinion.as_view()),
-    path("<str:agentid>/saltminion/", v3_views.SaltMinion.as_view()),
-    path("sysinfo/", v3_views.SysInfo.as_view()),
-    path("hello/", v3_views.Hello.as_view()),
-    path("checkrunner/", views.CheckRunner.as_view()),
-    path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
-]
@@ -1,41 +0,0 @@
-from django.shortcuts import get_object_or_404
-from django.utils import timezone as djangotime
-
-from rest_framework.authentication import TokenAuthentication
-from rest_framework.permissions import IsAuthenticated
-from rest_framework.response import Response
-from rest_framework.views import APIView
-
-from agents.models import Agent
-from checks.models import Check
-
-from checks.serializers import CheckRunnerGetSerializerV2
-
-
-class CheckRunner(APIView):
-    """
-    For the windows python agent
-    """
-
-    authentication_classes = [TokenAuthentication]
-    permission_classes = [IsAuthenticated]
-
-    def get(self, request, agentid):
-        agent = get_object_or_404(Agent, agent_id=agentid)
-        agent.last_seen = djangotime.now()
-        agent.save(update_fields=["last_seen"])
-        checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
-
-        ret = {
-            "agent": agent.pk,
-            "check_interval": agent.check_interval,
-            "checks": CheckRunnerGetSerializerV2(checks, many=True).data,
-        }
-        return Response(ret)
-
-    def patch(self, request):
-        check = get_object_or_404(Check, pk=request.data["id"])
-        check.last_run = djangotime.now()
-        check.save(update_fields=["last_run"])
-        status = check.handle_checkv2(request.data)
-        return Response(status)
@@ -26,23 +26,6 @@ class TestAPIv3(TacticalTestCase):

         self.check_not_authenticated("get", url)

-    def test_get_salt_minion(self):
-        url = f"/api/v3/{self.agent.agent_id}/saltminion/"
-        url2 = f"/api/v2/{self.agent.agent_id}/saltminion/"
-
-        r = self.client.get(url)
-        self.assertEqual(r.status_code, 200)
-        self.assertIn("latestVer", r.json().keys())
-        self.assertIn("currentVer", r.json().keys())
-        self.assertIn("salt_id", r.json().keys())
-        self.assertIn("downloadURL", r.json().keys())
-
-        r2 = self.client.get(url2)
-        self.assertEqual(r2.status_code, 200)
-
-        self.check_not_authenticated("get", url)
-        self.check_not_authenticated("get", url2)
-
     def test_get_mesh_info(self):
         url = f"/api/v3/{self.agent.pk}/meshinfo/"

@@ -61,7 +44,7 @@ class TestAPIv3(TacticalTestCase):
     def test_sysinfo(self):
         # TODO replace this with golang wmi sample data

-        url = f"/api/v3/sysinfo/"
+        url = "/api/v3/sysinfo/"
         with open(
             os.path.join(
                 settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json"

@@ -77,7 +60,7 @@ class TestAPIv3(TacticalTestCase):
         self.check_not_authenticated("patch", url)

     def test_hello_patch(self):
-        url = f"/api/v3/hello/"
+        url = "/api/v3/hello/"
         payload = {
             "agent_id": self.agent.agent_id,
             "logged_in_username": "None",
@@ -2,12 +2,11 @@ from django.urls import path
 from . import views

 urlpatterns = [
+    path("checkin/", views.CheckIn.as_view()),
     path("hello/", views.Hello.as_view()),
     path("checkrunner/", views.CheckRunner.as_view()),
     path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
     path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
-    path("saltminion/", views.SaltMinion.as_view()),
-    path("<str:agentid>/saltminion/", views.SaltMinion.as_view()),
     path("<int:pk>/meshinfo/", views.MeshInfo.as_view()),
     path("meshexe/", views.MeshExe.as_view()),
     path("sysinfo/", views.SysInfo.as_view()),
@@ -29,17 +29,110 @@ from winupdate.serializers import ApprovedUpdateSerializer
 from agents.tasks import (
     agent_recovery_email_task,
     agent_recovery_sms_task,
-    sync_salt_modules_task,
 )
-from winupdate.tasks import check_for_updates_task
-from software.tasks import install_chocolatey
 from checks.utils import bytes2human
 from tacticalrmm.utils import notify_error, reload_nats, filter_software, SoftwareList

 logger.configure(**settings.LOG_CONFIG)


+class CheckIn(APIView):
+    """
+    The agent's checkin endpoint
+    patch: called every 45 to 110 seconds, handles agent updates and recovery
+    put: called every 5 to 10 minutes, handles basic system info
+    post: called once on windows service startup
+    """
+
+    authentication_classes = [TokenAuthentication]
+    permission_classes = [IsAuthenticated]
+
+    def patch(self, request):
+        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
+        agent.version = request.data["version"]
+        agent.last_seen = djangotime.now()
+        agent.save(update_fields=["version", "last_seen"])
+
+        if agent.agentoutages.exists() and agent.agentoutages.last().is_active:
+            last_outage = agent.agentoutages.last()
+            last_outage.recovery_time = djangotime.now()
+            last_outage.save(update_fields=["recovery_time"])
+
+            if agent.overdue_email_alert:
+                agent_recovery_email_task.delay(pk=last_outage.pk)
+            if agent.overdue_text_alert:
+                agent_recovery_sms_task.delay(pk=last_outage.pk)
+
+        recovery = agent.recoveryactions.filter(last_run=None).last()
+        if recovery is not None:
+            recovery.last_run = djangotime.now()
+            recovery.save(update_fields=["last_run"])
+            return Response(recovery.send())
+
+        # handle agent update
+        if agent.pendingactions.filter(
+            action_type="agentupdate", status="pending"
+        ).exists():
+            update = agent.pendingactions.filter(
+                action_type="agentupdate", status="pending"
+            ).last()
+            update.status = "completed"
+            update.save(update_fields=["status"])
+            return Response(update.details)
+
+        # get any pending actions
+        if agent.pendingactions.filter(status="pending").exists():
+            agent.handle_pending_actions()
+
+        return Response("ok")
+
+    def put(self, request):
+        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
+        serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
+        serializer.is_valid(raise_exception=True)
+
+        if "disks" in request.data.keys():
+            disks = request.data["disks"]
+            new = []
+            # python agent
+            if isinstance(disks, dict):
+                for k, v in disks.items():
+                    new.append(v)
+            else:
+                # golang agent
+                for disk in disks:
+                    tmp = {}
+                    for k, v in disk.items():
+                        tmp["device"] = disk["device"]
+                        tmp["fstype"] = disk["fstype"]
+                        tmp["total"] = bytes2human(disk["total"])
+                        tmp["used"] = bytes2human(disk["used"])
+                        tmp["free"] = bytes2human(disk["free"])
+                        tmp["percent"] = int(disk["percent"])
+                    new.append(tmp)
+
+            serializer.save(disks=new)
+            return Response("ok")
+
+        if "logged_in_username" in request.data.keys():
+            if request.data["logged_in_username"] != "None":
+                serializer.save(last_logged_in_user=request.data["logged_in_username"])
+                return Response("ok")
+
+        serializer.save()
+        return Response("ok")
+
+    def post(self, request):
+        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
+
+        serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
+        serializer.is_valid(raise_exception=True)
+        serializer.save(last_seen=djangotime.now())
+        return Response("ok")
+
+
 class Hello(APIView):
+    #### DEPRECATED, for agents <= 1.1.9 ####
     """
     The agent's checkin endpoint
     patch: called every 30 to 120 seconds
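The new CheckIn view folds the agent's periodic traffic into one endpoint: patch (every 45 to 110 seconds) handles recovery and pending actions, put (every 5 to 10 minutes) syncs basic system info such as disks, and post runs once at service startup. For illustration, a minimal agent-side call could look like the following; the /api/v3/checkin/ path comes from the urls diff above, while the host, token, and version values are placeholders, not taken from the agent source:

    import requests

    # hypothetical agent-side checkin, assuming TokenAuthentication as on the view
    r = requests.patch(
        "https://rmm.example.com/api/v3/checkin/",
        json={"agent_id": "<agent_id>", "version": "<agent version>"},
        headers={"Authorization": "Token <agent-token>"},
        timeout=15,
    )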
@@ -121,15 +214,6 @@ class Hello(APIView):
         serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
         serializer.is_valid(raise_exception=True)
         serializer.save(last_seen=djangotime.now())

-        sync_salt_modules_task.delay(agent.pk)
-        check_for_updates_task.apply_async(
-            queue="wupdate", kwargs={"pk": agent.pk, "wait": True}
-        )
-
-        if not agent.choco_installed:
-            install_chocolatey.delay(agent.pk, wait=True)
-
         return Response("ok")

@@ -153,23 +237,11 @@ class CheckRunner(APIView):
         return Response(ret)

     def patch(self, request):
-        from logs.models import AuditLog
-
         check = get_object_or_404(Check, pk=request.data["id"])
         check.last_run = djangotime.now()
         check.save(update_fields=["last_run"])
         status = check.handle_checkv2(request.data)
-
-        # create audit entry
-        AuditLog.objects.create(
-            username=check.agent.hostname,
-            agent=check.agent.hostname,
-            object_type="agent",
-            action="check_run",
-            message=f"{check.readable_desc} was run on {check.agent.hostname}. Status: {status}",
-            after_value=Check.serialize(check),
-        )
-
         return Response(status)

@@ -211,77 +283,6 @@ class TaskRunner(APIView):
         return Response("ok")


-class SaltMinion(APIView):
-    authentication_classes = [TokenAuthentication]
-    permission_classes = [IsAuthenticated]
-
-    def get(self, request, agentid):
-        agent = get_object_or_404(Agent, agent_id=agentid)
-        ret = {
-            "latestVer": settings.LATEST_SALT_VER,
-            "currentVer": agent.salt_ver,
-            "salt_id": agent.salt_id,
-            "downloadURL": agent.winsalt_dl,
-        }
-        return Response(ret)
-
-    def post(self, request):
-        # accept the salt key
-        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
-        if agent.salt_id != request.data["saltid"]:
-            return notify_error("Salt keys do not match")
-
-        try:
-            resp = requests.post(
-                f"http://{settings.SALT_HOST}:8123/run",
-                json=[
-                    {
-                        "client": "wheel",
-                        "fun": "key.accept",
-                        "match": request.data["saltid"],
-                        "username": settings.SALT_USERNAME,
-                        "password": settings.SALT_PASSWORD,
-                        "eauth": "pam",
-                    }
-                ],
-                timeout=30,
-            )
-        except Exception:
-            return notify_error("No communication between agent and salt-api")
-
-        try:
-            data = resp.json()["return"][0]["data"]
-            minion = data["return"]["minions"][0]
-        except Exception:
-            return notify_error("Key error")
-
-        if data["success"] and minion == request.data["saltid"]:
-            return Response("Salt key was accepted")
-        else:
-            return notify_error("Not accepted")
-
-    def patch(self, request):
-        # sync modules
-        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
-        r = agent.salt_api_cmd(timeout=45, func="saltutil.sync_modules")
-
-        if r == "timeout" or r == "error":
-            return notify_error("Failed to sync salt modules")
-
-        if isinstance(r, list) and any("modules" in i for i in r):
-            return Response("Successfully synced salt modules")
-        elif isinstance(r, list) and not r:
-            return Response("Modules are already in sync")
-        else:
-            return notify_error(f"Failed to sync salt modules: {str(r)}")
-
-    def put(self, request):
-        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
-        agent.salt_ver = request.data["ver"]
-        agent.save(update_fields=["salt_ver"])
-        return Response("ok")
-
-
 class WinUpdater(APIView):

     authentication_classes = [TokenAuthentication]
@@ -343,17 +344,8 @@ class WinUpdater(APIView):
         if reboot:
             if agent.has_nats:
                 asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
-            else:
-                agent.salt_api_async(
-                    func="system.reboot",
-                    arg=7,
-                    kwargs={"in_seconds": True},
-                )
-
-            logger.info(f"{agent.hostname} is rebooting after updates were installed.")
-        else:
-            check_for_updates_task.apply_async(
-                queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
+            logger.info(
+                f"{agent.hostname} is rebooting after updates were installed."
             )

         return Response("ok")
@@ -6,11 +6,11 @@ from django.db import migrations
 class Migration(migrations.Migration):

     dependencies = [
-        ('automation', '0005_auto_20200922_1344'),
+        ("automation", "0005_auto_20200922_1344"),
     ]

     operations = [
         migrations.DeleteModel(
-            name='PolicyExclusions',
+            name="PolicyExclusions",
         ),
     ]
@@ -1,6 +1,5 @@
 from django.db import models
 from agents.models import Agent
-from clients.models import Site, Client
 from core.models import CoreSettings
 from logs.models import BaseAuditModel
@@ -58,6 +57,11 @@ class Policy(BaseAuditModel):

     @staticmethod
     def cascade_policy_tasks(agent):
+        from autotasks.tasks import delete_win_task_schedule
+
+        from autotasks.models import AutomatedTask
+        from logs.models import PendingAction
+
         # List of all tasks to be applied
         tasks = list()
         added_task_pks = list()
@@ -80,7 +84,7 @@ class Policy(BaseAuditModel):
             default_policy = CoreSettings.objects.first().server_policy
             client_policy = client.server_policy
             site_policy = site.server_policy
-        else:
+        elif agent.monitoring_type == "workstation":
             default_policy = CoreSettings.objects.first().workstation_policy
             client_policy = client.workstation_policy
             site_policy = site.workstation_policy
@@ -107,6 +111,33 @@ class Policy(BaseAuditModel):
                 tasks.append(task)
                 added_task_pks.append(task.pk)

+        # remove policy tasks from agent not included in policy
+        for task in agent.autotasks.filter(
+            parent_task__in=[
+                taskpk
+                for taskpk in agent_tasks_parent_pks
+                if taskpk not in added_task_pks
+            ]
+        ):
+            delete_win_task_schedule.delay(task.pk)
+
+        # handle matching tasks that haven't synced to agent yet or pending deletion due to agent being offline
+        for action in agent.pendingactions.exclude(status="completed"):
+            task = AutomatedTask.objects.get(pk=action.details["task_id"])
+            if (
+                task.parent_task in agent_tasks_parent_pks
+                and task.parent_task in added_task_pks
+            ):
+                agent.remove_matching_pending_task_actions(task.id)
+
+                PendingAction(
+                    agent=agent,
+                    action_type="taskaction",
+                    details={"action": "taskcreate", "task_id": task.id},
+                ).save()
+                task.sync_status = "notsynced"
+                task.save(update_fields=["sync_status"])
+
         return [task for task in tasks if task.pk not in agent_tasks_parent_pks]

     @staticmethod
@@ -132,7 +163,7 @@ class Policy(BaseAuditModel):
             default_policy = CoreSettings.objects.first().server_policy
             client_policy = client.server_policy
             site_policy = site.server_policy
-        else:
+        elif agent.monitoring_type == "workstation":
             default_policy = CoreSettings.objects.first().workstation_policy
             client_policy = client.workstation_policy
             site_policy = site.workstation_policy
@@ -280,6 +311,15 @@ class Policy(BaseAuditModel):
             + eventlog_checks
         )

+        # remove policy checks from agent that fell out of policy scope
+        agent.agentchecks.filter(
+            parent_check__in=[
+                checkpk
+                for checkpk in agent_checks_parent_pks
+                if checkpk not in [check.pk for check in final_list]
+            ]
+        ).delete()
+
         return [
             check for check in final_list if check.pk not in agent_checks_parent_pks
         ]
@@ -6,46 +6,42 @@ from tacticalrmm.celery import app


 @app.task
-def generate_agent_checks_from_policies_task(
-    ###
-    # copies the policy checks to all affected agents
-    #
-    # clear: clears all policy checks first
-    # create_tasks: also create tasks after checks are generated
-    ###
-    policypk,
-    clear=False,
-    create_tasks=False,
-):
+def generate_agent_checks_from_policies_task(policypk, create_tasks=False):

     policy = Policy.objects.get(pk=policypk)
-    for agent in policy.related_agents():
-        agent.generate_checks_from_policies(clear=clear)
+
+    if policy.is_default_server_policy and policy.is_default_workstation_policy:
+        agents = Agent.objects.all()
+    elif policy.is_default_server_policy:
+        agents = Agent.objects.filter(monitoring_type="server")
+    elif policy.is_default_workstation_policy:
+        agents = Agent.objects.filter(monitoring_type="workstation")
+    else:
+        agents = policy.related_agents()
+
+    for agent in agents:
+        agent.generate_checks_from_policies()
         if create_tasks:
-            agent.generate_tasks_from_policies(
-                clear=clear,
-            )
+            agent.generate_tasks_from_policies()


 @app.task
-def generate_agent_checks_by_location_task(
-    location, mon_type, clear=False, create_tasks=False
-):
+def generate_agent_checks_by_location_task(location, mon_type, create_tasks=False):

     for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
-        agent.generate_checks_from_policies(clear=clear)
+        agent.generate_checks_from_policies()

         if create_tasks:
-            agent.generate_tasks_from_policies(clear=clear)
+            agent.generate_tasks_from_policies()


 @app.task
-def generate_all_agent_checks_task(mon_type, clear=False, create_tasks=False):
+def generate_all_agent_checks_task(mon_type, create_tasks=False):
     for agent in Agent.objects.filter(monitoring_type=mon_type):
-        agent.generate_checks_from_policies(clear=clear)
+        agent.generate_checks_from_policies()

         if create_tasks:
-            agent.generate_tasks_from_policies(clear=clear)
+            agent.generate_tasks_from_policies()


 @app.task

@@ -83,18 +79,28 @@ def update_policy_check_fields_task(checkpk):


 @app.task
-def generate_agent_tasks_from_policies_task(policypk, clear=False):
+def generate_agent_tasks_from_policies_task(policypk):

     policy = Policy.objects.get(pk=policypk)
-    for agent in policy.related_agents():
-        agent.generate_tasks_from_policies(clear=clear)
+
+    if policy.is_default_server_policy and policy.is_default_workstation_policy:
+        agents = Agent.objects.all()
+    elif policy.is_default_server_policy:
+        agents = Agent.objects.filter(monitoring_type="server")
+    elif policy.is_default_workstation_policy:
+        agents = Agent.objects.filter(monitoring_type="workstation")
+    else:
+        agents = policy.related_agents()
+
+    for agent in agents:
+        agent.generate_tasks_from_policies()


 @app.task
-def generate_agent_tasks_by_location_task(location, mon_type, clear=False):
+def generate_agent_tasks_by_location_task(location, mon_type):

     for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
-        agent.generate_tasks_from_policies(clear=clear)
+        agent.generate_tasks_from_policies()


 @app.task
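The rewritten policy tasks above add a fan-out rule for default policies: a policy flagged as the default server and/or workstation policy is now applied to every agent of the matching monitoring type, not just to agents explicitly related to it. The added branch, pulled out as a standalone helper for clarity (names exactly as in the diff):

    def affected_agents(policy):
        # mirrors the branching added to generate_agent_*_from_policies_task
        if policy.is_default_server_policy and policy.is_default_workstation_policy:
            return Agent.objects.all()
        if policy.is_default_server_policy:
            return Agent.objects.filter(monitoring_type="server")
        if policy.is_default_workstation_policy:
            return Agent.objects.filter(monitoring_type="workstation")
        return policy.related_agents()

Note that the clear keyword argument is dropped from every task signature here, which is why the test and view hunks below remove clear=True from each call site.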
@@ -121,9 +121,7 @@ class TestPolicyViews(TacticalTestCase):

         resp = self.client.put(url, data, format="json")
         self.assertEqual(resp.status_code, 200)
-        mock_checks_task.assert_called_with(
-            policypk=policy.pk, clear=True, create_tasks=True
-        )
+        mock_checks_task.assert_called_with(policypk=policy.pk, create_tasks=True)

         self.check_not_authenticated("put", url)

@@ -140,8 +138,8 @@ class TestPolicyViews(TacticalTestCase):
         resp = self.client.delete(url, format="json")
         self.assertEqual(resp.status_code, 200)

-        mock_checks_task.assert_called_with(policypk=policy.pk, clear=True)
-        mock_tasks_task.assert_called_with(policypk=policy.pk, clear=True)
+        mock_checks_task.assert_called_with(policypk=policy.pk)
+        mock_tasks_task.assert_called_with(policypk=policy.pk)

         self.check_not_authenticated("delete", url)

@@ -298,7 +296,6 @@ class TestPolicyViews(TacticalTestCase):
         mock_checks_location_task.assert_called_with(
             location={"site__client_id": client.id},
             mon_type="server",
-            clear=True,
             create_tasks=True,
         )
         mock_checks_location_task.reset_mock()

@@ -311,7 +308,6 @@ class TestPolicyViews(TacticalTestCase):
         mock_checks_location_task.assert_called_with(
             location={"site__client_id": client.id},
             mon_type="workstation",
-            clear=True,
             create_tasks=True,
         )
         mock_checks_location_task.reset_mock()

@@ -324,7 +320,6 @@ class TestPolicyViews(TacticalTestCase):
         mock_checks_location_task.assert_called_with(
             location={"site_id": site.id},
             mon_type="server",
-            clear=True,
             create_tasks=True,
         )
         mock_checks_location_task.reset_mock()

@@ -337,7 +332,6 @@ class TestPolicyViews(TacticalTestCase):
         mock_checks_location_task.assert_called_with(
             location={"site_id": site.id},
             mon_type="workstation",
-            clear=True,
             create_tasks=True,
         )
         mock_checks_location_task.reset_mock()

@@ -347,7 +341,7 @@ class TestPolicyViews(TacticalTestCase):
         self.assertEqual(resp.status_code, 200)

         # called because the relation changed
-        mock_checks_task.assert_called_with(clear=True)
+        mock_checks_task.assert_called()
         mock_checks_task.reset_mock()

         # Adding the same relations shouldn't trigger mocks

@@ -396,7 +390,6 @@ class TestPolicyViews(TacticalTestCase):
         mock_checks_location_task.assert_called_with(
             location={"site__client_id": client.id},
             mon_type="server",
-            clear=True,
             create_tasks=True,
         )
         mock_checks_location_task.reset_mock()

@@ -409,7 +402,6 @@ class TestPolicyViews(TacticalTestCase):
         mock_checks_location_task.assert_called_with(
             location={"site__client_id": client.id},
             mon_type="workstation",
-            clear=True,
             create_tasks=True,
         )
         mock_checks_location_task.reset_mock()

@@ -422,7 +414,6 @@ class TestPolicyViews(TacticalTestCase):
         mock_checks_location_task.assert_called_with(
             location={"site_id": site.id},
             mon_type="server",
-            clear=True,
             create_tasks=True,
         )
         mock_checks_location_task.reset_mock()

@@ -435,7 +426,6 @@ class TestPolicyViews(TacticalTestCase):
         mock_checks_location_task.assert_called_with(
             location={"site_id": site.id},
             mon_type="workstation",
-            clear=True,
             create_tasks=True,
         )
         mock_checks_location_task.reset_mock()

@@ -444,7 +434,7 @@ class TestPolicyViews(TacticalTestCase):
         resp = self.client.post(url, agent_payload, format="json")
         self.assertEqual(resp.status_code, 200)
         # called because the relation changed
-        mock_checks_task.assert_called_with(clear=True)
+        mock_checks_task.assert_called()
         mock_checks_task.reset_mock()

         # adding the same relations shouldn't trigger mocks
@@ -753,7 +743,7 @@ class TestPolicyTasks(TacticalTestCase):
         agent = baker.make_recipe("agents.agent", site=site, policy=policy)

         # test policy assigned to agent
-        generate_agent_checks_from_policies_task(policy.id, clear=True)
+        generate_agent_checks_from_policies_task(policy.id)

         # make sure all checks were created. should be 7
         agent_checks = Agent.objects.get(pk=agent.id).agentchecks.all()

@@ -832,7 +822,6 @@ class TestPolicyTasks(TacticalTestCase):
         generate_agent_checks_by_location_task(
             {"site_id": sites[0].id},
             "server",
-            clear=True,
             create_tasks=True,
         )

@@ -846,7 +835,6 @@ class TestPolicyTasks(TacticalTestCase):
         generate_agent_checks_by_location_task(
             {"site__client_id": clients[0].id},
             "workstation",
-            clear=True,
             create_tasks=True,
         )
         # workstation_agent should now have policy checks and the other agents should not

@@ -875,7 +863,7 @@ class TestPolicyTasks(TacticalTestCase):
         core.workstation_policy = policy
         core.save()

-        generate_all_agent_checks_task("server", clear=True, create_tasks=True)
+        generate_all_agent_checks_task("server", create_tasks=True)

         # all servers should have 7 checks
         for agent in server_agents:

@@ -884,7 +872,7 @@ class TestPolicyTasks(TacticalTestCase):
         for agent in workstation_agents:
             self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)

-        generate_all_agent_checks_task("workstation", clear=True, create_tasks=True)
+        generate_all_agent_checks_task("workstation", create_tasks=True)

         # all agents should have 7 checks now
         for agent in server_agents:

@@ -961,7 +949,7 @@ class TestPolicyTasks(TacticalTestCase):
         site = baker.make("clients.Site")
         agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)

-        generate_agent_tasks_from_policies_task(policy.id, clear=True)
+        generate_agent_tasks_from_policies_task(policy.id)

         agent_tasks = Agent.objects.get(pk=agent.id).autotasks.all()

@@ -1000,9 +988,7 @@ class TestPolicyTasks(TacticalTestCase):
         agent1 = baker.make_recipe("agents.agent", site=sites[1])
         agent2 = baker.make_recipe("agents.agent", site=sites[3])

-        generate_agent_tasks_by_location_task(
-            {"site_id": sites[0].id}, "server", clear=True
-        )
+        generate_agent_tasks_by_location_task({"site_id": sites[0].id}, "server")

         # all servers in site1 and site2 should have 3 tasks
         self.assertEqual(

@@ -1013,7 +999,7 @@ class TestPolicyTasks(TacticalTestCase):
         self.assertEqual(Agent.objects.get(pk=agent2.id).autotasks.count(), 0)

         generate_agent_tasks_by_location_task(
-            {"site__client_id": clients[0].id}, "workstation", clear=True
+            {"site__client_id": clients[0].id}, "workstation"
         )

         # all workstations in Default1 should have 3 tasks
@@ -83,7 +83,6 @@ class GetUpdateDeletePolicy(APIView):
         if saved_policy.active != old_active or saved_policy.enforced != old_enforced:
             generate_agent_checks_from_policies_task.delay(
                 policypk=policy.pk,
-                clear=(not saved_policy.active or not saved_policy.enforced),
                 create_tasks=(saved_policy.active != old_active),
             )

@@ -93,8 +92,8 @@ class GetUpdateDeletePolicy(APIView):
         policy = get_object_or_404(Policy, pk=pk)

         # delete all managed policy checks off of agents
-        generate_agent_checks_from_policies_task.delay(policypk=policy.pk, clear=True)
-        generate_agent_tasks_from_policies_task.delay(policypk=policy.pk, clear=True)
+        generate_agent_checks_from_policies_task.delay(policypk=policy.pk)
+        generate_agent_tasks_from_policies_task.delay(policypk=policy.pk)
         policy.delete()

         return Response("ok")

@@ -218,7 +217,6 @@ class GetRelated(APIView):
         generate_agent_checks_by_location_task.delay(
             location={"site__client_id": client.id},
             mon_type="workstation",
-            clear=True,
             create_tasks=True,
         )

@@ -236,7 +234,6 @@ class GetRelated(APIView):
         generate_agent_checks_by_location_task.delay(
             location={"site_id": site.id},
             mon_type="workstation",
-            clear=True,
             create_tasks=True,
         )

@@ -258,7 +255,6 @@ class GetRelated(APIView):
         generate_agent_checks_by_location_task.delay(
             location={"site__client_id": client.id},
             mon_type="server",
-            clear=True,
             create_tasks=True,
         )

@@ -276,7 +272,6 @@ class GetRelated(APIView):
         generate_agent_checks_by_location_task.delay(
             location={"site_id": site.id},
             mon_type="server",
-            clear=True,
             create_tasks=True,
         )

@@ -296,7 +291,6 @@ class GetRelated(APIView):
         generate_agent_checks_by_location_task.delay(
             location={"site__client_id": client.id},
             mon_type="workstation",
-            clear=True,
             create_tasks=True,
         )

@@ -311,7 +305,6 @@ class GetRelated(APIView):
         generate_agent_checks_by_location_task.delay(
             location={"site_id": site.id},
             mon_type="workstation",
-            clear=True,
             create_tasks=True,
         )

@@ -329,7 +322,6 @@ class GetRelated(APIView):
         generate_agent_checks_by_location_task.delay(
             location={"site__client_id": client.id},
             mon_type="server",
-            clear=True,
             create_tasks=True,
         )

@@ -343,7 +335,6 @@ class GetRelated(APIView):
         generate_agent_checks_by_location_task.delay(
             location={"site_id": site.pk},
             mon_type="server",
-            clear=True,
             create_tasks=True,
         )

@@ -358,14 +349,14 @@ class GetRelated(APIView):
         if not agent.policy or agent.policy and agent.policy.pk != policy.pk:
             agent.policy = policy
             agent.save()
-            agent.generate_checks_from_policies(clear=True)
-            agent.generate_tasks_from_policies(clear=True)
+            agent.generate_checks_from_policies()
+            agent.generate_tasks_from_policies()
         else:
             if agent.policy:
                 agent.policy = None
                 agent.save()
-            agent.generate_checks_from_policies(clear=True)
-            agent.generate_tasks_from_policies(clear=True)
+            agent.generate_checks_from_policies()
+            agent.generate_tasks_from_policies()

         return Response("ok")
@@ -6,13 +6,13 @@ from django.db import migrations, models
 class Migration(migrations.Migration):

     dependencies = [
-        ('autotasks', '0008_auto_20201030_1515'),
+        ("autotasks", "0008_auto_20201030_1515"),
     ]

     operations = [
         migrations.AddField(
-            model_name='automatedtask',
-            name='run_time_bit_weekdays',
+            model_name="automatedtask",
+            name="run_time_bit_weekdays",
             field=models.IntegerField(blank=True, null=True),
         ),
     ]
@@ -6,7 +6,6 @@ import datetime as dt
 from django.db import models
 from django.contrib.postgres.fields import ArrayField
 from django.db.models.fields import DateTimeField
-from automation.models import Policy
 from logs.models import BaseAuditModel
 from tacticalrmm.utils import bitdays_to_string
@@ -43,7 +42,7 @@ class AutomatedTask(BaseAuditModel):
         blank=True,
     )
     policy = models.ForeignKey(
-        Policy,
+        "automation.Policy",
         related_name="autotasks",
         null=True,
         blank=True,
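Swapping the imported Policy class for the lazy string reference "automation.Policy" (together with dropping from automation.models import Policy in the previous hunk) is the standard Django way to break a circular import between two apps' models modules: the foreign key target is resolved via the app registry at startup rather than at import time. A minimal sketch of the pattern (kwargs beyond those shown in the hunk are omitted):

    # no import of automation.models needed; Django resolves the string lazily
    policy = models.ForeignKey(
        "automation.Policy",
        related_name="autotasks",
        null=True,
        blank=True,
    )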
@@ -76,9 +76,14 @@ def create_win_task_schedule(pk, pending_action=False):
         return "error"

     r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
+
     if r != "ok":
         # don't create pending action if this task was initiated by a pending action
         if not pending_action:
+
+            # complete any other pending actions on agent with same task_id
+            task.agent.remove_matching_pending_task_actions(task.id)
+
             PendingAction(
                 agent=task.agent,
                 action_type="taskaction",
@@ -144,6 +149,7 @@ def enable_or_disable_win_task(pk, action, pending_action=False):

     task.sync_status = "synced"
     task.save(update_fields=["sync_status"])
+
     return "ok"

@@ -157,9 +163,13 @@ def delete_win_task_schedule(pk, pending_action=False):
     }
     r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))

-    if r != "ok":
+    if r != "ok" and "The system cannot find the file specified" not in r:
         # don't create pending action if this task was initiated by a pending action
         if not pending_action:
+
+            # complete any other pending actions on agent with same task_id
+            task.agent.remove_matching_pending_task_actions(task.id)
+
             PendingAction(
                 agent=task.agent,
                 action_type="taskaction",

@@ -168,7 +178,7 @@ def delete_win_task_schedule(pk, pending_action=False):
             task.sync_status = "pendingdeletion"
             task.save(update_fields=["sync_status"])

-        return
+        return "timeout"

     # complete pending action since it was successful
     if pending_action:

@@ -176,6 +186,9 @@ def delete_win_task_schedule(pk, pending_action=False):
         pendingaction.status = "completed"
         pendingaction.save(update_fields=["status"])

+    # complete any other pending actions on agent with same task_id
+    task.agent.remove_matching_pending_task_actions(task.id)
+
     task.delete()
     return "ok"
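delete_win_task_schedule now returns "timeout" rather than falling off the end with None when the agent is unreachable, and it tolerates the agent reporting that the scheduled task is already gone. Callers can therefore distinguish a deferred deletion from a completed one; the calling code below is illustrative, not part of the diff:

    res = delete_win_task_schedule(task.pk)
    if res == "timeout":
        # task is marked "pendingdeletion" and a PendingAction was queued;
        # deletion is retried when the agent next checks in
        pass
    elif res == "ok":
        pass  # task schedule removed on the agent and the task row deleted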
@@ -1,5 +1,6 @@
 from django.contrib import admin

-from .models import Check
+from .models import Check, CheckHistory

 admin.site.register(Check)
+admin.site.register(CheckHistory)
api/tacticalrmm/checks/migrations/0011_check_run_history.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+# Generated by Django 3.1.4 on 2021-01-09 02:56
+
+import django.contrib.postgres.fields
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("checks", "0010_auto_20200922_1344"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="check",
+            name="run_history",
+            field=django.contrib.postgres.fields.ArrayField(
+                base_field=django.contrib.postgres.fields.ArrayField(
+                    base_field=models.PositiveIntegerField(),
+                    blank=True,
+                    null=True,
+                    size=None,
+                ),
+                blank=True,
+                default=list,
+                null=True,
+                size=None,
+            ),
+        ),
+    ]
api/tacticalrmm/checks/migrations/0011_checkhistory.py (new file, 39 lines)
@@ -0,0 +1,39 @@
+# Generated by Django 3.1.4 on 2021-01-09 21:36
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("checks", "0010_auto_20200922_1344"),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name="CheckHistory",
+            fields=[
+                (
+                    "id",
+                    models.AutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name="ID",
+                    ),
+                ),
+                ("x", models.DateTimeField()),
+                ("y", models.PositiveIntegerField()),
+                ("results", models.JSONField(blank=True, null=True)),
+                (
+                    "check_history",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        related_name="check_history",
+                        to="checks.check",
+                    ),
+                ),
+            ],
+        ),
+    ]
api/tacticalrmm/checks/migrations/0012_auto_20210110_0503.py (new file, 18 lines)
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.4 on 2021-01-10 05:03
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("checks", "0011_checkhistory"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="checkhistory",
+            name="y",
+            field=models.PositiveIntegerField(blank=True, null=True),
+        ),
+    ]
api/tacticalrmm/checks/migrations/0013_auto_20210110_0505.py (new file, 18 lines)
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.4 on 2021-01-10 05:05
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("checks", "0012_auto_20210110_0503"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="checkhistory",
+            name="y",
+            field=models.PositiveIntegerField(null=True),
+        ),
+    ]
api/tacticalrmm/checks/migrations/0014_merge_20210110_1808.py (new file, 13 lines)
@@ -0,0 +1,13 @@
+# Generated by Django 3.1.4 on 2021-01-10 18:08
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("checks", "0013_auto_20210110_0505"),
+        ("checks", "0011_check_run_history"),
+    ]
+
+    operations = []
api/tacticalrmm/checks/migrations/0015_auto_20210110_1808.py (new file, 27 lines)
@@ -0,0 +1,27 @@
+# Generated by Django 3.1.4 on 2021-01-10 18:08
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("checks", "0014_merge_20210110_1808"),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name="check",
+            name="run_history",
+        ),
+        migrations.AlterField(
+            model_name="checkhistory",
+            name="x",
+            field=models.DateTimeField(auto_now_add=True),
+        ),
+        migrations.AlterField(
+            model_name="checkhistory",
+            name="y",
+            field=models.PositiveIntegerField(blank=True, default=None, null=True),
+        ),
+    ]
@@ -3,12 +3,13 @@ import string
 import os
 import json
 import pytz
-from statistics import mean
+from statistics import mean, mode

 from django.db import models
 from django.conf import settings
 from django.contrib.postgres.fields import ArrayField
 from django.core.validators import MinValueValidator, MaxValueValidator
+from rest_framework.fields import JSONField

 from core.models import CoreSettings
 from logs.models import BaseAuditModel
@@ -214,6 +215,9 @@ class Check(BaseAuditModel):
             "modified_time",
         ]

+    def add_check_history(self, value, more_info=None):
+        CheckHistory.objects.create(check_history=self, y=value, results=more_info)
+
     def handle_checkv2(self, data):
         # cpuload or mem checks
         if self.check_type == "cpuload" or self.check_type == "memory":
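add_check_history is a one-line wrapper around CheckHistory.objects.create (the model itself is added at the bottom of this file): y stores the numeric sample and results optional context. The calls added throughout handle_checkv2 below follow a single convention, e.g.:

    # numeric checks (cpuload, memory, diskspace): store the measured percentage
    self.add_check_history(data["percent"])
    # pass/fail checks (script, ping, winsvc, eventlog): 1 = failing, 0 = passing,
    # plus a truncated detail payload
    self.add_check_history(1 if self.status == "failing" else 0, self.more_info[:60])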
@@ -232,6 +236,9 @@ class Check(BaseAuditModel):
             else:
                 self.status = "passing"

+            # add check history
+            self.add_check_history(data["percent"])
+
         # diskspace checks
         elif self.check_type == "diskspace":
             if data["exists"]:
@@ -245,6 +252,9 @@ class Check(BaseAuditModel):
                 self.status = "passing"

                 self.more_info = f"Total: {total}B, Free: {free}B"
+
+                # add check history
+                self.add_check_history(percent_used)
             else:
                 self.status = "failing"
                 self.more_info = f"Disk {self.disk} does not exist"
@@ -277,6 +287,17 @@ class Check(BaseAuditModel):
                 ]
             )

+            # add check history
+            self.add_check_history(
+                1 if self.status == "failing" else 0,
+                {
+                    "retcode": data["retcode"],
+                    "stdout": data["stdout"][:60],
+                    "stderr": data["stderr"][:60],
+                    "execution_time": self.execution_time,
+                },
+            )
+
         # ping checks
         elif self.check_type == "ping":
             success = ["Reply", "bytes", "time", "TTL"]
@@ -293,6 +314,10 @@ class Check(BaseAuditModel):
|
|||||||
self.more_info = output
|
self.more_info = output
|
||||||
self.save(update_fields=["more_info"])
|
self.save(update_fields=["more_info"])
|
||||||
|
|
||||||
|
self.add_check_history(
|
||||||
|
1 if self.status == "failing" else 0, self.more_info[:60]
|
||||||
|
)
|
||||||
|
|
||||||
# windows service checks
|
# windows service checks
|
||||||
elif self.check_type == "winsvc":
|
elif self.check_type == "winsvc":
|
||||||
svc_stat = data["status"]
|
svc_stat = data["status"]
|
||||||
@@ -332,6 +357,10 @@ class Check(BaseAuditModel):
|
|||||||
|
|
||||||
self.save(update_fields=["more_info"])
|
self.save(update_fields=["more_info"])
|
||||||
|
|
||||||
|
self.add_check_history(
|
||||||
|
1 if self.status == "failing" else 0, self.more_info[:60]
|
||||||
|
)
|
||||||
|
|
||||||
elif self.check_type == "eventlog":
|
elif self.check_type == "eventlog":
|
||||||
log = []
|
log = []
|
||||||
is_wildcard = self.event_id_is_wildcard
|
is_wildcard = self.event_id_is_wildcard
|
||||||
@@ -391,6 +420,11 @@ class Check(BaseAuditModel):
|
|||||||
self.extra_details = {"log": log}
|
self.extra_details = {"log": log}
|
||||||
self.save(update_fields=["extra_details"])
|
self.save(update_fields=["extra_details"])
|
||||||
|
|
||||||
|
self.add_check_history(
|
||||||
|
1 if self.status == "failing" else 0,
|
||||||
|
"Events Found:" + str(len(self.extra_details["log"])),
|
||||||
|
)
|
||||||
|
|
||||||
# handle status
|
# handle status
|
||||||
if self.status == "failing":
|
if self.status == "failing":
|
||||||
self.fail_count += 1
|
self.fail_count += 1
|
||||||
@@ -645,3 +679,17 @@ class Check(BaseAuditModel):
|
|||||||
body = subject
|
body = subject
|
||||||
|
|
||||||
CORE.send_sms(body)
|
CORE.send_sms(body)
|
||||||
|
|
||||||
|
|
||||||
|
class CheckHistory(models.Model):
|
||||||
|
check_history = models.ForeignKey(
|
||||||
|
Check,
|
||||||
|
related_name="check_history",
|
||||||
|
on_delete=models.CASCADE,
|
||||||
|
)
|
||||||
|
x = models.DateTimeField(auto_now_add=True)
|
||||||
|
y = models.PositiveIntegerField(null=True, blank=True, default=None)
|
||||||
|
results = models.JSONField(null=True, blank=True)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.check_history.readable_desc
|
||||||
|
|||||||
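Taken together, the new CheckHistory model and the add_check_history() helper turn every check run into a (timestamp, value, results) point. As a rough illustration (not part of the diff, assuming an existing Check instance), the reverse relation named by related_name="check_history" can be used to pull recent points for graphing:

    from django.utils import timezone as djangotime

    def last_week_points(check):
        # collect the last 7 days of history for one check, newest first
        cutoff = djangotime.now() - djangotime.timedelta(days=7)
        return list(
            check.check_history.filter(x__gt=cutoff)
            .order_by("-x")
            .values("x", "y", "results")
        )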
@@ -1,8 +1,8 @@
 import validators as _v
+import pytz
 from rest_framework import serializers
 
-from .models import Check
+from .models import Check, CheckHistory
 from autotasks.models import AutomatedTask
 from scripts.serializers import ScriptSerializer, ScriptCheckSerializer
 
@@ -65,6 +65,26 @@ class CheckSerializer(serializers.ModelSerializer):
                     "Please enter a valid IP address or domain name"
                 )
 
+        if check_type == "cpuload" and not self.instance:
+            if (
+                Check.objects.filter(**self.context, check_type="cpuload")
+                .exclude(managed_by_policy=True)
+                .exists()
+            ):
+                raise serializers.ValidationError(
+                    "A cpuload check for this agent already exists"
+                )
+
+        if check_type == "memory" and not self.instance:
+            if (
+                Check.objects.filter(**self.context, check_type="memory")
+                .exclude(managed_by_policy=True)
+                .exists()
+            ):
+                raise serializers.ValidationError(
+                    "A memory check for this agent already exists"
+                )
+
         return val
 
 
@@ -217,3 +237,15 @@ class CheckResultsSerializer(serializers.ModelSerializer):
     class Meta:
         model = Check
         fields = "__all__"
+
+
+class CheckHistorySerializer(serializers.ModelSerializer):
+    x = serializers.SerializerMethodField()
+
+    def get_x(self, obj):
+        return obj.x.astimezone(pytz.timezone(self.context["timezone"])).isoformat()
+
+    # used for returning large amounts of graph data
+    class Meta:
+        model = CheckHistory
+        fields = ("x", "y", "results")
@@ -5,8 +5,6 @@ from time import sleep
 from tacticalrmm.celery import app
 from django.utils import timezone as djangotime
 
-from agents.models import Agent
-
 
 @app.task
 def handle_check_email_alert_task(pk):
@@ -56,3 +54,15 @@ def handle_check_sms_alert_task(pk):
     check.save(update_fields=["text_sent"])
 
     return "ok"
+
+
+@app.task
+def prune_check_history(older_than_days: int) -> str:
+    from .models import CheckHistory
+
+    CheckHistory.objects.filter(
+        x__lt=djangotime.make_aware(dt.datetime.today())
+        - djangotime.timedelta(days=older_than_days)
+    ).delete()
+
+    return "ok"
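The new prune_check_history task can be driven synchronously (as the tests later in this diff do) or queued on a Celery worker (as core_maintenance_tasks does further down). A minimal sketch:

    from checks.tasks import prune_check_history

    prune_check_history(30)        # inline: delete history older than 30 days
    prune_check_history.delay(30)  # or hand the same work to a Celery worker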
@@ -1,5 +1,7 @@
+from checks.models import CheckHistory
 from tacticalrmm.test import TacticalTestCase
 from .serializers import CheckSerializer
+from django.utils import timezone as djangotime
 
 from model_bakery import baker
 from itertools import cycle
@@ -8,6 +10,7 @@ from itertools import cycle
 class TestCheckViews(TacticalTestCase):
     def setUp(self):
         self.authenticate()
+        self.setup_coresettings()
 
     def test_get_disk_check(self):
         # setup data
@@ -55,6 +58,52 @@ class TestCheckViews(TacticalTestCase):
         resp = self.client.post(url, invalid_payload, format="json")
         self.assertEqual(resp.status_code, 400)
 
+    def test_add_cpuload_check(self):
+        url = "/checks/checks/"
+        agent = baker.make_recipe("agents.agent")
+        payload = {
+            "pk": agent.pk,
+            "check": {
+                "check_type": "cpuload",
+                "threshold": 66,
+                "fails_b4_alert": 9,
+            },
+        }
+
+        resp = self.client.post(url, payload, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        payload["threshold"] = 87
+        resp = self.client.post(url, payload, format="json")
+        self.assertEqual(resp.status_code, 400)
+        self.assertEqual(
+            resp.json()["non_field_errors"][0],
+            "A cpuload check for this agent already exists",
+        )
+
+    def test_add_memory_check(self):
+        url = "/checks/checks/"
+        agent = baker.make_recipe("agents.agent")
+        payload = {
+            "pk": agent.pk,
+            "check": {
+                "check_type": "memory",
+                "threshold": 78,
+                "fails_b4_alert": 1,
+            },
+        }
+
+        resp = self.client.post(url, payload, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        payload["threshold"] = 55
+        resp = self.client.post(url, payload, format="json")
+        self.assertEqual(resp.status_code, 400)
+        self.assertEqual(
+            resp.json()["non_field_errors"][0],
+            "A memory check for this agent already exists",
+        )
+
     def test_get_policy_disk_check(self):
         # setup data
         policy = baker.make("automation.Policy")
@@ -134,3 +183,69 @@ class TestCheckViews(TacticalTestCase):
         self.assertEqual(resp.status_code, 200)
 
         self.check_not_authenticated("patch", url_a)
+
+    def test_get_check_history(self):
+        # setup data
+        agent = baker.make_recipe("agents.agent")
+        check = baker.make_recipe("checks.diskspace_check", agent=agent)
+        baker.make("checks.CheckHistory", check_history=check, _quantity=30)
+        check_history_data = baker.make(
+            "checks.CheckHistory",
+            check_history=check,
+            _quantity=30,
+        )
+
+        # need to manually set the date back 35 days
+        for check_history in check_history_data:
+            check_history.x = djangotime.now() - djangotime.timedelta(days=35)
+            check_history.save()
+
+        # test invalid check pk
+        resp = self.client.patch("/checks/history/500/", format="json")
+        self.assertEqual(resp.status_code, 404)
+
+        url = f"/checks/history/{check.id}/"
+
+        # test with timeFilter last 30 days
+        data = {"timeFilter": 30}
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+        self.assertEqual(len(resp.data), 30)
+
+        # test with timeFilter equal to 0
+        data = {"timeFilter": 0}
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+        self.assertEqual(len(resp.data), 60)
+
+        self.check_not_authenticated("patch", url)
+
+
+class TestCheckTasks(TacticalTestCase):
+    def setUp(self):
+        self.setup_coresettings()
+
+    def test_prune_check_history(self):
+        from .tasks import prune_check_history
+
+        # setup data
+        check = baker.make_recipe("checks.diskspace_check")
+        baker.make("checks.CheckHistory", check_history=check, _quantity=30)
+        check_history_data = baker.make(
+            "checks.CheckHistory",
+            check_history=check,
+            _quantity=30,
+        )
+
+        # need to manually set the date back 35 days
+        for check_history in check_history_data:
+            check_history.x = djangotime.now() - djangotime.timedelta(days=35)
+            check_history.save()
+
+        # prune data 30 days old
+        prune_check_history(30)
+        self.assertEqual(CheckHistory.objects.count(), 30)
+
+        # prune all check history data
+        prune_check_history(0)
+        self.assertEqual(CheckHistory.objects.count(), 0)
@@ -7,4 +7,5 @@ urlpatterns = [
     path("<pk>/loadchecks/", views.load_checks),
     path("getalldisks/", views.get_disks_for_policies),
     path("runchecks/<pk>/", views.run_checks),
+    path("history/<int:checkpk>/", views.CheckHistory.as_view()),
 ]
@@ -1,6 +1,10 @@
 import asyncio
 
 from django.shortcuts import get_object_or_404
+from django.db.models import Q
+from django.utils import timezone as djangotime
+
+from datetime import datetime as dt
 
 from rest_framework.views import APIView
 from rest_framework.response import Response
@@ -13,7 +17,7 @@ from automation.models import Policy
 from .models import Check
 from scripts.models import Script
 
-from .serializers import CheckSerializer
+from .serializers import CheckSerializer, CheckHistorySerializer
 
 
 from automation.tasks import (
@@ -135,6 +139,29 @@ class GetUpdateDeleteCheck(APIView):
         return Response(f"{check.readable_desc} was deleted!")
 
 
+class CheckHistory(APIView):
+    def patch(self, request, checkpk):
+        check = get_object_or_404(Check, pk=checkpk)
+
+        timeFilter = Q()
+
+        if "timeFilter" in request.data:
+            if request.data["timeFilter"] != 0:
+                timeFilter = Q(
+                    x__lte=djangotime.make_aware(dt.today()),
+                    x__gt=djangotime.make_aware(dt.today())
+                    - djangotime.timedelta(days=request.data["timeFilter"]),
+                )
+
+        check_history = check.check_history.filter(timeFilter).order_by("-x")
+
+        return Response(
+            CheckHistorySerializer(
+                check_history, context={"timezone": check.agent.timezone}, many=True
+            ).data
+        )
+
+
 @api_view()
 def run_checks(request, pk):
     agent = get_object_or_404(Agent, pk=pk)
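A detail worth calling out in the CheckHistory view above: timeFilter defaults to an empty Q(), and filtering a queryset by an empty Q object is a no-op, which is what makes timeFilter=0 (or a missing key) mean "return all history". A quick illustrative sketch (not from the diff):

    from django.db.models import Q
    from checks.models import Check

    check = Check.objects.first()
    # an empty Q() matches everything, so this is the full history queryset
    all_rows = check.check_history.filter(Q())
    assert all_rows.count() == check.check_history.count()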
@@ -6,48 +6,48 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('clients', '0004_auto_20200821_2115'),
+        ("clients", "0004_auto_20200821_2115"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='client',
-            name='created_by',
+            model_name="client",
+            name="created_by",
             field=models.CharField(blank=True, max_length=100, null=True),
         ),
         migrations.AddField(
-            model_name='client',
-            name='created_time',
+            model_name="client",
+            name="created_time",
             field=models.DateTimeField(auto_now_add=True, null=True),
         ),
         migrations.AddField(
-            model_name='client',
-            name='modified_by',
+            model_name="client",
+            name="modified_by",
             field=models.CharField(blank=True, max_length=100, null=True),
        ),
         migrations.AddField(
-            model_name='client',
-            name='modified_time',
+            model_name="client",
+            name="modified_time",
             field=models.DateTimeField(auto_now=True, null=True),
         ),
         migrations.AddField(
-            model_name='site',
-            name='created_by',
+            model_name="site",
+            name="created_by",
             field=models.CharField(blank=True, max_length=100, null=True),
         ),
         migrations.AddField(
-            model_name='site',
-            name='created_time',
+            model_name="site",
+            name="created_time",
             field=models.DateTimeField(auto_now_add=True, null=True),
         ),
         migrations.AddField(
-            model_name='site',
-            name='modified_by',
+            model_name="site",
+            name="modified_by",
             field=models.CharField(blank=True, max_length=100, null=True),
         ),
         migrations.AddField(
-            model_name='site',
-            name='modified_time',
+            model_name="site",
+            name="modified_time",
             field=models.DateTimeField(auto_now=True, null=True),
         ),
     ]
@@ -8,24 +8,67 @@ import uuid
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('knox', '0007_auto_20190111_0542'),
-        ('clients', '0005_auto_20200922_1344'),
+        ("knox", "0007_auto_20190111_0542"),
+        ("clients", "0005_auto_20200922_1344"),
     ]
 
     operations = [
         migrations.CreateModel(
-            name='Deployment',
+            name="Deployment",
             fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('uid', models.UUIDField(default=uuid.uuid4, editable=False)),
-                ('mon_type', models.CharField(choices=[('server', 'Server'), ('workstation', 'Workstation')], default='server', max_length=255)),
-                ('arch', models.CharField(choices=[('64', '64 bit'), ('32', '32 bit')], default='64', max_length=255)),
-                ('expiry', models.DateTimeField(blank=True, null=True)),
-                ('token_key', models.CharField(max_length=255)),
-                ('install_flags', models.JSONField(blank=True, null=True)),
-                ('auth_token', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deploytokens', to='knox.authtoken')),
-                ('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deployclients', to='clients.client')),
-                ('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deploysites', to='clients.site')),
+                (
+                    "id",
+                    models.AutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name="ID",
+                    ),
+                ),
+                ("uid", models.UUIDField(default=uuid.uuid4, editable=False)),
+                (
+                    "mon_type",
+                    models.CharField(
+                        choices=[("server", "Server"), ("workstation", "Workstation")],
+                        default="server",
+                        max_length=255,
+                    ),
+                ),
+                (
+                    "arch",
+                    models.CharField(
+                        choices=[("64", "64 bit"), ("32", "32 bit")],
+                        default="64",
+                        max_length=255,
+                    ),
+                ),
+                ("expiry", models.DateTimeField(blank=True, null=True)),
+                ("token_key", models.CharField(max_length=255)),
+                ("install_flags", models.JSONField(blank=True, null=True)),
+                (
+                    "auth_token",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        related_name="deploytokens",
+                        to="knox.authtoken",
+                    ),
+                ),
+                (
+                    "client",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        related_name="deployclients",
+                        to="clients.client",
+                    ),
+                ),
+                (
+                    "site",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        related_name="deploysites",
+                        to="clients.site",
+                    ),
+                ),
             ],
         ),
     ]
@@ -6,18 +6,18 @@ from django.db import migrations
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('clients', '0006_deployment'),
+        ("clients", "0006_deployment"),
     ]
 
     operations = [
         migrations.RenameField(
-            model_name='client',
-            old_name='client',
-            new_name='name',
+            model_name="client",
+            old_name="client",
+            new_name="name",
         ),
         migrations.RenameField(
-            model_name='site',
-            old_name='site',
-            new_name='name',
+            model_name="site",
+            old_name="site",
+            new_name="name",
         ),
     ]
@@ -6,16 +6,16 @@ from django.db import migrations
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('clients', '0007_auto_20201102_1920'),
+        ("clients", "0007_auto_20201102_1920"),
     ]
 
     operations = [
         migrations.AlterModelOptions(
-            name='client',
-            options={'ordering': ('name',)},
+            name="client",
+            options={"ordering": ("name",)},
         ),
         migrations.AlterModelOptions(
-            name='site',
-            options={'ordering': ('name',)},
+            name="site",
+            options={"ordering": ("name",)},
         ),
     ]
@@ -192,7 +192,7 @@ class GenerateAgent(APIView):
         if not os.path.exists(go_bin):
             return notify_error("Missing golang")
 
-        api = f"{request.scheme}://{request.get_host()}"
+        api = f"https://{request.get_host()}"
         inno = (
             f"winagent-v{settings.LATEST_AGENT_VER}.exe"
             if d.arch == "64"
@@ -57,7 +57,6 @@ func main() {
 
 	debugLog := flag.String("log", "", "Verbose output")
 	localMesh := flag.String("local-mesh", "", "Use local mesh agent")
-	noSalt := flag.Bool("nosalt", false, "Does not install salt")
 	silent := flag.Bool("silent", false, "Do not popup any message boxes during installation")
 	cert := flag.String("cert", "", "Path to ca.pem")
 	timeout := flag.String("timeout", "", "Timeout for subprocess calls")
@@ -86,10 +85,6 @@ func main() {
 		cmdArgs = append(cmdArgs, "-silent")
 	}
 
-	if *noSalt {
-		cmdArgs = append(cmdArgs, "-nosalt")
-	}
-
 	if len(strings.TrimSpace(*localMesh)) != 0 {
 		cmdArgs = append(cmdArgs, "-local-mesh", *localMesh)
 	}
@@ -6,13 +6,13 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('core', '0008_auto_20200910_1434'),
+        ("core", "0008_auto_20200910_1434"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='coresettings',
-            name='agent_auto_update',
+            model_name="coresettings",
+            name="agent_auto_update",
             field=models.BooleanField(default=True),
         ),
     ]
@@ -6,28 +6,28 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('core', '0009_coresettings_agent_auto_update'),
+        ("core", "0009_coresettings_agent_auto_update"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='coresettings',
-            name='created_by',
+            model_name="coresettings",
+            name="created_by",
             field=models.CharField(blank=True, max_length=100, null=True),
         ),
         migrations.AddField(
-            model_name='coresettings',
-            name='created_time',
+            model_name="coresettings",
+            name="created_time",
             field=models.DateTimeField(auto_now_add=True, null=True),
         ),
         migrations.AddField(
-            model_name='coresettings',
-            name='modified_by',
+            model_name="coresettings",
+            name="modified_by",
             field=models.CharField(blank=True, max_length=100, null=True),
         ),
         migrations.AddField(
-            model_name='coresettings',
-            name='modified_time',
+            model_name="coresettings",
+            name="modified_time",
             field=models.DateTimeField(auto_now=True, null=True),
         ),
     ]
@@ -7,28 +7,34 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('core', '0010_auto_20201002_1257'),
+        ("core", "0010_auto_20201002_1257"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='coresettings',
-            name='sms_alert_recipients',
-            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None),
+            model_name="coresettings",
+            name="sms_alert_recipients",
+            field=django.contrib.postgres.fields.ArrayField(
+                base_field=models.CharField(blank=True, max_length=255, null=True),
+                blank=True,
+                default=list,
+                null=True,
+                size=None,
+            ),
         ),
         migrations.AddField(
-            model_name='coresettings',
-            name='twilio_account_sid',
+            model_name="coresettings",
+            name="twilio_account_sid",
             field=models.CharField(blank=True, max_length=255, null=True),
         ),
         migrations.AddField(
-            model_name='coresettings',
-            name='twilio_auth_token',
+            model_name="coresettings",
+            name="twilio_auth_token",
             field=models.CharField(blank=True, max_length=255, null=True),
         ),
         migrations.AddField(
-            model_name='coresettings',
-            name='twilio_number',
+            model_name="coresettings",
+            name="twilio_number",
             field=models.CharField(blank=True, max_length=255, null=True),
         ),
     ]
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.4 on 2021-01-10 18:08
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("core", "0011_auto_20201026_0719"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="coresettings",
+            name="check_history_prune_days",
+            field=models.PositiveIntegerField(default=30),
+        ),
+    ]
@@ -49,6 +49,8 @@ class CoreSettings(BaseAuditModel):
     default_time_zone = models.CharField(
         max_length=255, choices=TZ_CHOICES, default="America/Los_Angeles"
     )
+    # check history older than this many days gets pruned
+    check_history_prune_days = models.PositiveIntegerField(default=30)
     mesh_token = models.CharField(max_length=255, null=True, blank=True, default="")
     mesh_username = models.CharField(max_length=255, null=True, blank=True, default="")
     mesh_site = models.CharField(max_length=255, null=True, blank=True, default="")
@@ -4,8 +4,10 @@ from loguru import logger
 from django.conf import settings
 from django.utils import timezone as djangotime
 from tacticalrmm.celery import app
+from core.models import CoreSettings
 from autotasks.models import AutomatedTask
 from autotasks.tasks import delete_win_task_schedule
+from checks.tasks import prune_check_history
 
 logger.configure(**settings.LOG_CONFIG)
 
@@ -25,3 +27,7 @@ def core_maintenance_tasks():
 
         if now > task_time_utc:
             delete_win_task_schedule.delay(task.pk)
+
+    # remove old CheckHistory data
+    older_than = CoreSettings.objects.first().check_history_prune_days
+    prune_check_history.delay(older_than)
@@ -1,6 +1,7 @@
 from tacticalrmm.test import TacticalTestCase
 from core.tasks import core_maintenance_tasks
 from unittest.mock import patch
+from core.models import CoreSettings
 from model_bakery import baker, seq
 
 
@@ -34,8 +35,57 @@ class TestCoreTasks(TacticalTestCase):
 
         self.check_not_authenticated("get", url)
 
+    @patch("automation.tasks.generate_all_agent_checks_task.delay")
+    def test_edit_coresettings(self, generate_all_agent_checks_task):
+        url = "/core/editsettings/"
+
+        # setup
+        policies = baker.make("Policy", _quantity=2)
+        # test normal request
+        data = {
+            "smtp_from_email": "newexample@example.com",
+            "mesh_token": "New_Mesh_Token",
+        }
+        r = self.client.patch(url, data)
+        self.assertEqual(r.status_code, 200)
+        self.assertEqual(
+            CoreSettings.objects.first().smtp_from_email, data["smtp_from_email"]
+        )
+        self.assertEqual(CoreSettings.objects.first().mesh_token, data["mesh_token"])
+
+        generate_all_agent_checks_task.assert_not_called()
+
+        # test adding policy
+        data = {
+            "workstation_policy": policies[0].id,
+            "server_policy": policies[1].id,
+        }
+        r = self.client.patch(url, data)
+        self.assertEqual(r.status_code, 200)
+        self.assertEqual(CoreSettings.objects.first().server_policy.id, policies[1].id)
+        self.assertEqual(
+            CoreSettings.objects.first().workstation_policy.id, policies[0].id
+        )
+
+        self.assertEqual(generate_all_agent_checks_task.call_count, 2)
+
+        generate_all_agent_checks_task.reset_mock()
+
+        # test remove policy
+        data = {
+            "workstation_policy": "",
+        }
+        r = self.client.patch(url, data)
+        self.assertEqual(r.status_code, 200)
+        self.assertEqual(CoreSettings.objects.first().workstation_policy, None)
+
+        self.assertEqual(generate_all_agent_checks_task.call_count, 1)
+
+        self.check_not_authenticated("patch", url)
+
+    @patch("tacticalrmm.utils.reload_nats")
     @patch("autotasks.tasks.remove_orphaned_win_tasks.delay")
-    def test_ui_maintenance_actions(self, remove_orphaned_win_tasks):
+    def test_ui_maintenance_actions(self, remove_orphaned_win_tasks, reload_nats):
         url = "/core/servermaintenance/"
 
         agents = baker.make_recipe("agents.online_agent", _quantity=3)
@@ -54,6 +104,7 @@ class TestCoreTasks(TacticalTestCase):
         data = {"action": "reload_nats"}
         r = self.client.post(url, data)
         self.assertEqual(r.status_code, 200)
+        reload_nats.assert_called_once()
 
         # test prune db with no tables
         data = {"action": "prune_db"}
@@ -42,21 +42,19 @@ def get_core_settings(request):
 
 @api_view(["PATCH"])
 def edit_settings(request):
-    settings = CoreSettings.objects.first()
-    serializer = CoreSettingsSerializer(instance=settings, data=request.data)
+    coresettings = CoreSettings.objects.first()
+    old_server_policy = coresettings.server_policy
+    old_workstation_policy = coresettings.workstation_policy
+    serializer = CoreSettingsSerializer(instance=coresettings, data=request.data)
     serializer.is_valid(raise_exception=True)
     new_settings = serializer.save()
 
     # check if default policies changed
-    if settings.server_policy != new_settings.server_policy:
-        generate_all_agent_checks_task.delay(
-            mon_type="server", clear=True, create_tasks=True
-        )
+    if old_server_policy != new_settings.server_policy:
+        generate_all_agent_checks_task.delay(mon_type="server", create_tasks=True)
 
-    if settings.workstation_policy != new_settings.workstation_policy:
-        generate_all_agent_checks_task.delay(
-            mon_type="workstation", clear=True, create_tasks=True
-        )
+    if old_workstation_policy != new_settings.workstation_policy:
+        generate_all_agent_checks_task.delay(mon_type="workstation", create_tasks=True)
 
     return Response("ok")
 
@@ -69,7 +67,13 @@ def version(request):
 @api_view()
 def dashboard_info(request):
     return Response(
-        {"trmm_version": settings.TRMM_VERSION, "dark_mode": request.user.dark_mode}
+        {
+            "trmm_version": settings.TRMM_VERSION,
+            "dark_mode": request.user.dark_mode,
+            "show_community_scripts": request.user.show_community_scripts,
+            "dbl_click_action": request.user.agent_dblclick_action,
+            "default_agent_tbl_tab": request.user.default_agent_tbl_tab,
+        }
     )
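The edit_settings rewrite above fixes an aliasing bug: serializer.save() updates the same instance that was fetched, so the old code compared settings.server_policy against itself and the policy-changed branch could never fire reliably. Snapshotting the old values before saving is the general pattern; a minimal sketch with placeholder names (obj, field, serializer_cls are illustrative, not from the diff):

    # Snapshot-before-save: capture the old value before save() mutates obj.
    def save_and_detect_change(obj, serializer_cls, data, field):
        old_value = getattr(obj, field)
        serializer = serializer_cls(instance=obj, data=data)
        serializer.is_valid(raise_exception=True)
        new_obj = serializer.save()  # same instance, updated in place
        return old_value != getattr(new_obj, field)  # a real before/after comparison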
@@ -6,13 +6,28 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('logs', '0007_auditlog_debug_info'),
+        ("logs", "0007_auditlog_debug_info"),
     ]
 
     operations = [
         migrations.AlterField(
-            model_name='auditlog',
-            name='action',
-            field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command')], max_length=100),
+            model_name="auditlog",
+            name="action",
+            field=models.CharField(
+                choices=[
+                    ("login", "User Login"),
+                    ("failed_login", "Failed User Login"),
+                    ("delete", "Delete Object"),
+                    ("modify", "Modify Object"),
+                    ("add", "Add Object"),
+                    ("view", "View Object"),
+                    ("check_run", "Check Run"),
+                    ("task_run", "Task Run"),
+                    ("remote_session", "Remote Session"),
+                    ("execute_script", "Execute Script"),
+                    ("execute_command", "Execute Command"),
+                ],
+                max_length=100,
+            ),
         ),
     ]
@@ -6,13 +6,29 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('logs', '0008_auto_20201110_1431'),
+        ("logs", "0008_auto_20201110_1431"),
     ]
 
     operations = [
         migrations.AlterField(
-            model_name='auditlog',
-            name='action',
-            field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('agent_install', 'Agent Install'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command')], max_length=100),
+            model_name="auditlog",
+            name="action",
+            field=models.CharField(
+                choices=[
+                    ("login", "User Login"),
+                    ("failed_login", "Failed User Login"),
+                    ("delete", "Delete Object"),
+                    ("modify", "Modify Object"),
+                    ("add", "Add Object"),
+                    ("view", "View Object"),
+                    ("check_run", "Check Run"),
+                    ("task_run", "Task Run"),
+                    ("agent_install", "Agent Install"),
+                    ("remote_session", "Remote Session"),
+                    ("execute_script", "Execute Script"),
+                    ("execute_command", "Execute Command"),
+                ],
+                max_length=100,
+            ),
         ),
     ]
@@ -6,18 +6,50 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('logs', '0009_auto_20201110_1431'),
+        ("logs", "0009_auto_20201110_1431"),
     ]
 
     operations = [
         migrations.AlterField(
-            model_name='auditlog',
-            name='action',
-            field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('agent_install', 'Agent Install'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command'), ('bulk_action', 'Bulk Action')], max_length=100),
+            model_name="auditlog",
+            name="action",
+            field=models.CharField(
+                choices=[
+                    ("login", "User Login"),
+                    ("failed_login", "Failed User Login"),
+                    ("delete", "Delete Object"),
+                    ("modify", "Modify Object"),
+                    ("add", "Add Object"),
+                    ("view", "View Object"),
+                    ("check_run", "Check Run"),
+                    ("task_run", "Task Run"),
+                    ("agent_install", "Agent Install"),
+                    ("remote_session", "Remote Session"),
+                    ("execute_script", "Execute Script"),
+                    ("execute_command", "Execute Command"),
+                    ("bulk_action", "Bulk Action"),
+                ],
+                max_length=100,
+            ),
         ),
         migrations.AlterField(
-            model_name='auditlog',
-            name='object_type',
-            field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk')], max_length=100),
+            model_name="auditlog",
+            name="object_type",
+            field=models.CharField(
+                choices=[
+                    ("user", "User"),
+                    ("script", "Script"),
+                    ("agent", "Agent"),
+                    ("policy", "Policy"),
+                    ("winupdatepolicy", "Patch Policy"),
+                    ("client", "Client"),
+                    ("site", "Site"),
+                    ("check", "Check"),
+                    ("automatedtask", "Automated Task"),
+                    ("coresettings", "Core Settings"),
+                    ("bulk", "Bulk"),
+                ],
+                max_length=100,
+            ),
         ),
     ]
@@ -6,13 +6,22 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('logs', '0010_auto_20201110_2238'),
+        ("logs", "0010_auto_20201110_2238"),
    ]
 
     operations = [
         migrations.AlterField(
-            model_name='pendingaction',
-            name='action_type',
-            field=models.CharField(blank=True, choices=[('schedreboot', 'Scheduled Reboot'), ('taskaction', 'Scheduled Task Action'), ('agentupdate', 'Agent Update')], max_length=255, null=True),
+            model_name="pendingaction",
+            name="action_type",
+            field=models.CharField(
+                blank=True,
+                choices=[
+                    ("schedreboot", "Scheduled Reboot"),
+                    ("taskaction", "Scheduled Task Action"),
+                    ("agentupdate", "Agent Update"),
+                ],
+                max_length=255,
+                null=True,
+            ),
         ),
     ]
5
api/tacticalrmm/natsapi/apps.py
Normal file
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class NatsapiConfig(AppConfig):
+    name = "natsapi"
10
api/tacticalrmm/natsapi/urls.py
Normal file
@@ -0,0 +1,10 @@
+from django.urls import path
+from . import views
+
+urlpatterns = [
+    path("natsinfo/", views.nats_info),
+    path("checkin/", views.NatsCheckIn.as_view()),
+    path("syncmesh/", views.SyncMeshNodeID.as_view()),
+    path("winupdates/", views.NatsWinUpdates.as_view()),
+    path("choco/", views.NatsChoco.as_view()),
+]
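These routes still need to be mounted in the project-level URLconf; a hypothetical sketch of that wiring (the "natsapi/" prefix is an assumption for illustration, not shown in this diff):

    from django.urls import include, path

    urlpatterns = [
        # ...existing routes...
        path("natsapi/", include("natsapi.urls")),  # assumed mount point
    ]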
236
api/tacticalrmm/natsapi/views.py
Normal file
@@ -0,0 +1,236 @@
+import asyncio
+import time
+from django.utils import timezone as djangotime
+from loguru import logger
+
+from rest_framework.response import Response
+from rest_framework.views import APIView
+from rest_framework.decorators import (
+    api_view,
+    permission_classes,
+    authentication_classes,
+)
+
+from django.conf import settings
+from django.shortcuts import get_object_or_404
+
+from agents.models import Agent
+from winupdate.models import WinUpdate
+from software.models import InstalledSoftware
+from checks.utils import bytes2human
+from agents.serializers import WinAgentSerializer
+from agents.tasks import (
+    agent_recovery_email_task,
+    agent_recovery_sms_task,
+    handle_agent_recovery_task,
+)
+
+from tacticalrmm.utils import notify_error, filter_software, SoftwareList
+
+logger.configure(**settings.LOG_CONFIG)
+
+
+@api_view()
+@permission_classes([])
+@authentication_classes([])
+def nats_info(request):
+    return Response({"user": "tacticalrmm", "password": settings.SECRET_KEY})
+
+
+class NatsCheckIn(APIView):
+
+    authentication_classes = []
+    permission_classes = []
+
+    def patch(self, request):
+        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
+        agent.version = request.data["version"]
+        agent.last_seen = djangotime.now()
+        agent.save(update_fields=["version", "last_seen"])
+
+        if agent.agentoutages.exists() and agent.agentoutages.last().is_active:
+            last_outage = agent.agentoutages.last()
+            last_outage.recovery_time = djangotime.now()
+            last_outage.save(update_fields=["recovery_time"])
+
+            if agent.overdue_email_alert:
+                agent_recovery_email_task.delay(pk=last_outage.pk)
+            if agent.overdue_text_alert:
+                agent_recovery_sms_task.delay(pk=last_outage.pk)
+
+        recovery = agent.recoveryactions.filter(last_run=None).last()
+        if recovery is not None:
+            recovery.last_run = djangotime.now()
+            recovery.save(update_fields=["last_run"])
+            handle_agent_recovery_task.delay(pk=recovery.pk)
+            return Response("ok")
+
+        # get any pending actions
+        if agent.pendingactions.filter(status="pending").exists():
+            agent.handle_pending_actions()
+
+        return Response("ok")
+
+    def put(self, request):
+        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
+        serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
+
+        if request.data["func"] == "disks":
+            disks = request.data["disks"]
+            new = []
+            for disk in disks:
+                tmp = {}
+                for _, _ in disk.items():
+                    tmp["device"] = disk["device"]
+                    tmp["fstype"] = disk["fstype"]
+                    tmp["total"] = bytes2human(disk["total"])
+                    tmp["used"] = bytes2human(disk["used"])
+                    tmp["free"] = bytes2human(disk["free"])
+                    tmp["percent"] = int(disk["percent"])
+                new.append(tmp)
+
+            serializer.is_valid(raise_exception=True)
+            serializer.save(disks=new)
+            return Response("ok")
+
+        if request.data["func"] == "loggedonuser":
+            if request.data["logged_in_username"] != "None":
+                serializer.is_valid(raise_exception=True)
+                serializer.save(last_logged_in_user=request.data["logged_in_username"])
+                return Response("ok")
+
+        if request.data["func"] == "software":
+            raw: SoftwareList = request.data["software"]
+            if not isinstance(raw, list):
+                return notify_error("err")
+
+            sw = filter_software(raw)
+            if not InstalledSoftware.objects.filter(agent=agent).exists():
+                InstalledSoftware(agent=agent, software=sw).save()
+            else:
+                s = agent.installedsoftware_set.first()
+                s.software = sw
+                s.save(update_fields=["software"])
+
+            return Response("ok")
+
+        serializer.is_valid(raise_exception=True)
+        serializer.save()
+        return Response("ok")
+
+    # called once during tacticalagent windows service startup
+    def post(self, request):
+        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
+        if not agent.choco_installed:
+            asyncio.run(agent.nats_cmd({"func": "installchoco"}, wait=False))
+
+        time.sleep(0.5)
+        asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False))
+        return Response("ok")
+
+
+class SyncMeshNodeID(APIView):
+    authentication_classes = []
+    permission_classes = []
+
+    def post(self, request):
+        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
+        if agent.mesh_node_id != request.data["nodeid"]:
+            agent.mesh_node_id = request.data["nodeid"]
+            agent.save(update_fields=["mesh_node_id"])
+
+        return Response("ok")
+
+
+class NatsChoco(APIView):
+    authentication_classes = []
+    permission_classes = []
+
+    def post(self, request):
+        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
+        agent.choco_installed = request.data["installed"]
+        agent.save(update_fields=["choco_installed"])
+        return Response("ok")
+
+
+class NatsWinUpdates(APIView):
+    authentication_classes = []
+    permission_classes = []
+
+    def put(self, request):
+        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
+        reboot_policy: str = agent.get_patch_policy().reboot_after_install
+        reboot = False
+
+        if reboot_policy == "always":
+            reboot = True
+
+        if request.data["needs_reboot"]:
+            if reboot_policy == "required":
+                reboot = True
+            elif reboot_policy == "never":
+                agent.needs_reboot = True
+                agent.save(update_fields=["needs_reboot"])
+
+        if reboot:
+            asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
+            logger.info(f"{agent.hostname} is rebooting after updates were installed.")
+
+        return Response("ok")
+
+    def patch(self, request):
+        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
+        u = agent.winupdates.filter(guid=request.data["guid"]).last()
+        success: bool = request.data["success"]
+        if success:
+            u.result = "success"
+            u.downloaded = True
+            u.installed = True
+            u.date_installed = djangotime.now()
+            u.save(
+                update_fields=[
+                    "result",
+                    "downloaded",
+                    "installed",
+                    "date_installed",
+                ]
+            )
+        else:
+            u.result = "failed"
+            u.save(update_fields=["result"])
+
+        return Response("ok")
+
+    def post(self, request):
+        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
+        updates = request.data["wua_updates"]
+        for update in updates:
+            if agent.winupdates.filter(guid=update["guid"]).exists():
+                u = agent.winupdates.filter(guid=update["guid"]).last()
+                u.downloaded = update["downloaded"]
+                u.installed = update["installed"]
+                u.save(update_fields=["downloaded", "installed"])
+            else:
+                try:
+                    kb = "KB" + update["kb_article_ids"][0]
+                except:
+                    continue
+
+                WinUpdate(
+                    agent=agent,
+                    guid=update["guid"],
+                    kb=kb,
+                    title=update["title"],
+                    installed=update["installed"],
+                    downloaded=update["downloaded"],
+                    description=update["description"],
+                    severity=update["severity"],
+                    categories=update["categories"],
+                    category_ids=update["category_ids"],
+                    kb_article_ids=update["kb_article_ids"],
+                    more_info_urls=update["more_info_urls"],
+                    support_url=update["support_url"],
+                    revision_number=update["revision_number"],
+                ).save()
+
+        return Response("ok")
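For orientation, a hypothetical client-side sketch of the check-in these views expect; the host, URL prefix, and field values are placeholders, not confirmed by this diff:

    import requests

    # PATCH /checkin/ updates version/last_seen and resolves any active outage
    requests.patch(
        "https://api.example.com/natsapi/checkin/",  # assumed mount point
        json={"agent_id": "some-agent-id", "version": "1.1.0"},
    )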
@@ -1,38 +1,38 @@
-amqp==2.6.1
+amqp==5.0.2
 asgiref==3.3.1
 asyncio-nats-client==0.11.4
 billiard==3.6.3.0
-celery==4.4.6
+celery==5.0.5
 certifi==2020.12.5
-cffi==1.14.3
+cffi==1.14.4
-chardet==3.0.4
+chardet==4.0.0
-cryptography==3.2.1
+cryptography==3.3.1
 decorator==4.4.2
-Django==3.1.4
+Django==3.1.5
-django-cors-headers==3.5.0
+django-cors-headers==3.6.0
 django-rest-knox==4.1.0
 djangorestframework==3.12.2
 future==0.18.2
 idna==2.10
-kombu==4.6.11
+kombu==5.0.2
 loguru==0.5.3
-msgpack==1.0.0
+msgpack==1.0.2
-packaging==20.4
+packaging==20.8
 psycopg2-binary==2.8.6
 pycparser==2.20
 pycryptodome==3.9.9
 pyotp==2.4.1
 pyparsing==2.4.7
-pytz==2020.4
+pytz==2020.5
 qrcode==6.1
 redis==3.5.3
-requests==2.25.0
+requests==2.25.1
 six==1.15.0
 sqlparse==0.4.1
-twilio==6.49.0
+twilio==6.51.0
 urllib3==1.26.2
 uWSGI==2.0.19.1
-validators==0.18.1
+validators==0.18.2
-vine==1.3.0
+vine==5.0.0
 websockets==8.1
 zipp==3.4.0
@@ -6,8 +6,5 @@ script = Recipe(
     name="Test Script",
     description="Test Desc",
     shell="cmd",
-    filename="test.bat",
     script_type="userdefined",
 )
-
-builtin_script = script.extend(script_type="builtin")
@@ -96,5 +96,103 @@
     "name": "Check BIOS Information",
     "description": "Retrieves and reports on BIOS make, version, and date.",
     "shell": "powershell"
+  },
+  {
+    "filename": "ResetHighPerformancePowerProfiletoDefaults.ps1",
+    "submittedBy": "https://github.com/azulskyknight",
+    "name": "Reset High Perf Power Profile",
+    "description": "Resets monitor, disk, standby, and hibernate timers in the default High Performance power profile to their default values. It also re-indexes the AC and DC power profiles into their default order.",
+    "shell": "powershell"
+  },
+  {
+    "filename": "SetHighPerformancePowerProfile.ps1",
+    "submittedBy": "https://github.com/azulskyknight",
+    "name": "Set High Perf Power Profile",
+    "description": "Sets the High Performance power profile as the active power profile. Use this to keep machines from falling asleep.",
+    "shell": "powershell"
+  },
+  {
+    "filename": "Windows10Upgrade.ps1",
+    "submittedBy": "https://github.com/RVL-Solutions and https://github.com/darimm",
+    "name": "Windows 10 Upgrade",
+    "description": "Forces an upgrade to the latest release of Windows 10.",
+    "shell": "powershell"
+  },
+  {
+    "filename": "DiskStatus.ps1",
+    "submittedBy": "https://github.com/dinger1986",
+    "name": "Check Disks",
+    "description": "Checks local disks for errors reported in event viewer within the last 24 hours",
+    "shell": "powershell"
+  },
+  {
+    "filename": "DuplicatiStatus.ps1",
+    "submittedBy": "https://github.com/dinger1986",
+    "name": "Check Duplicati",
+    "description": "Checks that Duplicati backup has been running properly over the last 24 hours",
+    "shell": "powershell"
+  },
+  {
+    "filename": "EnableDefender.ps1",
+    "submittedBy": "https://github.com/dinger1986",
+    "name": "Enable Windows Defender",
+    "description": "Enables Windows Defender and sets preferences",
+    "shell": "powershell"
+  },
+  {
+    "filename": "OpenSSHServerInstall.ps1",
+    "submittedBy": "https://github.com/dinger1986",
+    "name": "Install SSH",
+    "description": "Installs and enables OpenSSH Server",
+    "shell": "powershell"
+  },
+  {
+    "filename": "RDP_enable.bat",
+    "submittedBy": "https://github.com/dinger1986",
+    "name": "Enable RDP",
+    "description": "Enables RDP",
+    "shell": "cmd"
+  },
+  {
+    "filename": "Speedtest.ps1",
+    "submittedBy": "https://github.com/dinger1986",
+    "name": "PS Speed Test",
+    "description": "PowerShell speed test (Win 10 or Server 2016+)",
+    "shell": "powershell"
+  },
+  {
+    "filename": "SyncTime.bat",
+    "submittedBy": "https://github.com/dinger1986",
+    "name": "Sync DC Time",
+    "description": "Syncs time with domain controller",
+    "shell": "cmd"
+  },
+  {
+    "filename": "WinDefenderClearLogs.ps1",
+    "submittedBy": "https://github.com/dinger1986",
+    "name": "Clear Defender Logs",
+    "description": "Clears Windows Defender logs",
+    "shell": "powershell"
+  },
+  {
+    "filename": "WinDefenderStatus.ps1",
+    "submittedBy": "https://github.com/dinger1986",
+    "name": "Defender Status",
+    "description": "Checks for malware and antispyware detections, that Windows Defender is healthy, the last scan time, etc. within the last 24 hours",
+    "shell": "powershell"
+  },
+  {
+    "filename": "disable_FastStartup.bat",
+    "submittedBy": "https://github.com/dinger1986",
+    "name": "Disable Fast Startup",
+    "description": "Disables Fast Startup on Windows 10",
+    "shell": "cmd"
+  },
+  {
+    "filename": "updatetacticalexclusion.ps1",
+    "submittedBy": "https://github.com/dinger1986",
+    "name": "TRMM Defender Exclusions",
+    "description": "Windows Defender exclusions for Tactical RMM",
+    "shell": "cmd"
+  }
   }
 ]
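Because the loader and the tests later in this diff both key off this JSON file, a malformed entry breaks both. A hedged validation sketch (the relative path is an assumption; the field names come from the entries above):

    import json

    VALID_SHELLS = {"powershell", "cmd", "python"}

    with open("community_scripts.json") as f:
        scripts = json.load(f)

    for entry in scripts:
        # every community script must carry these four keys
        assert entry["filename"] and entry["name"] and entry["description"]
        assert entry["shell"] in VALID_SHELLS, entry["filename"]
    print(f"{len(scripts)} community scripts look well-formed")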
@@ -0,0 +1,28 @@
+# Generated by Django 3.1.3 on 2020-12-07 15:58
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("scripts", "0003_auto_20200922_1344"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="script",
+            name="category",
+            field=models.CharField(blank=True, max_length=100, null=True),
+        ),
+        migrations.AddField(
+            model_name="script",
+            name="favorite",
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name="script",
+            name="script_base64",
+            field=models.TextField(blank=True, null=True),
+        ),
+    ]
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.3 on 2020-12-07 16:06
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("scripts", "0004_auto_20201207_1558"),
+    ]
+
+    operations = [
+        migrations.RenameField(
+            model_name="script",
+            old_name="script_base64",
+            new_name="code_base64",
+        ),
+    ]
@@ -0,0 +1,42 @@
+# Generated by Django 3.1.4 on 2020-12-10 21:45
+
+from django.db import migrations
+from django.conf import settings
+import os
+import base64
+from pathlib import Path
+
+
+def move_scripts_to_db(apps, schema_editor):
+    print("")
+    Script = apps.get_model("scripts", "Script")
+    for script in Script.objects.all():
+        if not script.script_type == "builtin":
+
+            if script.filename:
+                filepath = f"{settings.SCRIPTS_DIR}/userdefined/{script.filename}"
+            else:
+                print("No filename on script found. Skipping")
+                continue
+
+            # test if file exists
+            if os.path.exists(filepath):
+                print(f"Found script {script.name}. Importing code.")
+
+                with open(filepath, "rb") as f:
+                    script_bytes = f.read().decode("utf-8").encode("ascii", "ignore")
+                    script.code_base64 = base64.b64encode(script_bytes).decode("ascii")
+                    script.save(update_fields=["code_base64"])
+            else:
+                print(
+                    f"Script file {script.name} was not found on the disk. You will need to edit the script in the UI"
+                )
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("scripts", "0005_auto_20201207_1606"),
+    ]
+
+    operations = [migrations.RunPython(move_scripts_to_db, migrations.RunPython.noop)]
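The heart of this data migration is the file-to-column move: read the script file's bytes, drop anything non-ASCII, and store the result base64-encoded. A standalone round-trip sketch, runnable without Django (the path is illustrative only):

    import base64

    def encode_script(path: str) -> str:
        # mirror the migration: utf-8 decode, ascii-ignore, then base64
        with open(path, "rb") as f:
            script_bytes = f.read().decode("utf-8").encode("ascii", "ignore")
        return base64.b64encode(script_bytes).decode("ascii")

    def decode_script(code_base64: str) -> str:
        return base64.b64decode(code_base64.encode("ascii")).decode("ascii", "ignore")

    # e.g. decode_script(encode_script("hello.ps1")) == the file's ASCII text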
@@ -1,3 +1,4 @@
+import base64
 from django.db import models
 from logs.models import BaseAuditModel
 from django.conf import settings
@@ -17,41 +18,27 @@ SCRIPT_TYPES = [
 class Script(BaseAuditModel):
     name = models.CharField(max_length=255)
     description = models.TextField(null=True, blank=True)
-    filename = models.CharField(max_length=255)
+    filename = models.CharField(max_length=255)  # deprecated
     shell = models.CharField(
         max_length=100, choices=SCRIPT_SHELLS, default="powershell"
     )
     script_type = models.CharField(
         max_length=100, choices=SCRIPT_TYPES, default="userdefined"
     )
+    favorite = models.BooleanField(default=False)
+    category = models.CharField(max_length=100, null=True, blank=True)
+    code_base64 = models.TextField(null=True, blank=True)
 
     def __str__(self):
-        return self.filename
-
-    @property
-    def filepath(self):
-        # for the windows agent when using 'salt-call'
-        if self.script_type == "userdefined":
-            return f"salt://scripts//userdefined//{self.filename}"
-        else:
-            return f"salt://scripts//{self.filename}"
-
-    @property
-    def file(self):
-        if self.script_type == "userdefined":
-            return f"{settings.SCRIPTS_DIR}/userdefined/{self.filename}"
-        else:
-            return f"{settings.SCRIPTS_DIR}/{self.filename}"
+        return self.name
 
     @property
     def code(self):
-        try:
-            with open(self.file, "r") as f:
-                text = f.read()
-        except:
-            text = "n/a"
-
-        return text
+        if self.code_base64:
+            base64_bytes = self.code_base64.encode("ascii", "ignore")
+            return base64.b64decode(base64_bytes).decode("ascii", "ignore")
+        else:
+            return ""
 
     @classmethod
     def load_community_scripts(cls):
@@ -62,7 +49,6 @@ class Script(BaseAuditModel):
 
         # load community uploaded scripts into the database
         # skip ones that already exist, only updating name / desc in case it changes
-        # files will be copied by the update script or in docker to /srv/salt/scripts
 
         # for install script
         if not settings.DOCKER_BUILD:
@@ -79,21 +65,40 @@ class Script(BaseAuditModel):
         for script in info:
             if os.path.exists(os.path.join(scripts_dir, script["filename"])):
                 s = cls.objects.filter(script_type="builtin").filter(
-                    filename=script["filename"]
+                    name=script["name"]
                 )
                 if s.exists():
                     i = s.first()
                     i.name = script["name"]
                     i.description = script["description"]
-                    i.save(update_fields=["name", "description"])
+                    i.category = "Community"
+
+                    with open(os.path.join(scripts_dir, script["filename"]), "rb") as f:
+                        script_bytes = (
+                            f.read().decode("utf-8").encode("ascii", "ignore")
+                        )
+                        i.code_base64 = base64.b64encode(script_bytes).decode("ascii")
+
+                    i.save(
+                        update_fields=["name", "description", "category", "code_base64"]
+                    )
                 else:
                     print(f"Adding new community script: {script['name']}")
+
+                    with open(os.path.join(scripts_dir, script["filename"]), "rb") as f:
+                        script_bytes = (
+                            f.read().decode("utf-8").encode("ascii", "ignore")
+                        )
+                        code_base64 = base64.b64encode(script_bytes).decode("ascii")
+
                     cls(
+                        code_base64=code_base64,
                         name=script["name"],
                         description=script["description"],
                         filename=script["filename"],
                         shell=script["shell"],
                         script_type="builtin",
+                        category="Community",
                     ).save()
 
     @staticmethod
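The net model change: script bodies now live in code_base64, the salt-era filepath/file properties are gone, and the code property decodes on access, returning an empty string when nothing is stored. A dependency-free sketch of exactly that behavior ("VGVzdA==" is base64 for "Test", the same fixture the tests below use):

    import base64

    def code(code_base64: str) -> str:
        if code_base64:
            base64_bytes = code_base64.encode("ascii", "ignore")
            return base64.b64decode(base64_bytes).decode("ascii", "ignore")
        return ""

    assert code("VGVzdA==") == "Test"
    assert code("") == ""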
@@ -1,41 +1,33 @@
-import os
-
-from django.conf import settings
-from rest_framework.serializers import ModelSerializer, ValidationError, ReadOnlyField
+from rest_framework.serializers import ModelSerializer, ReadOnlyField
+
 from .models import Script
 
 
-class ScriptSerializer(ModelSerializer):
-
-    code = ReadOnlyField()
-    filepath = ReadOnlyField()
+class ScriptTableSerializer(ModelSerializer):
 
     class Meta:
         model = Script
-        fields = "__all__"
-
-    def validate(self, val):
-        if "filename" in val:
-            # validate the filename
-            if (
-                not val["filename"].endswith(".py")
-                and not val["filename"].endswith(".ps1")
-                and not val["filename"].endswith(".bat")
-            ):
-                raise ValidationError("File types supported are .py, .ps1 and .bat")
-
-            # make sure file doesn't already exist on server
-            # but only if adding, not if editing since will overwrite if edit
-            if not self.instance:
-                script_path = os.path.join(
-                    f"{settings.SCRIPTS_DIR}/userdefined", val["filename"]
-                )
-                if os.path.exists(script_path):
-                    raise ValidationError(
-                        f"{val['filename']} already exists. Delete or edit the existing script first."
-                    )
-
-        return val
+        fields = [
+            "id",
+            "name",
+            "description",
+            "script_type",
+            "shell",
+            "category",
+            "favorite",
+        ]
+
+
+class ScriptSerializer(ModelSerializer):
+    class Meta:
+        model = Script
+        fields = [
+            "id",
+            "name",
+            "description",
+            "shell",
+            "category",
+            "favorite",
+            "code_base64",
+        ]
 
 
 class ScriptCheckSerializer(ModelSerializer):
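The serializer split is a payload decision: the table endpoint ships metadata only, while the detail serializer includes code_base64 for the editor. A self-contained illustration with made-up data showing why the table omits the body:

    import json

    script = {
        "id": 1,
        "name": "Check Disks",
        "description": "Checks local disks for errors",
        "script_type": "builtin",
        "shell": "powershell",
        "category": "Community",
        "favorite": False,
        "code_base64": "VGVzdA==" * 500,  # stand-in for a real script body
    }

    table_row = {k: v for k, v in script.items() if k != "code_base64"}
    print(len(json.dumps(script)), "bytes with code vs", len(json.dumps(table_row)), "bytes without")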
@@ -6,25 +6,9 @@ from scripts.models import Script
 
 
 @app.task
-def handle_bulk_command_task(agentpks, cmd, shell, timeout):
+def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None:
     agents = Agent.objects.filter(pk__in=agentpks)
 
     agents_nats = [agent for agent in agents if agent.has_nats]
-    agents_salt = [agent for agent in agents if not agent.has_nats]
-    minions = [agent.salt_id for agent in agents_salt]
-
-    if minions:
-        Agent.salt_batch_async(
-            minions=minions,
-            func="cmd.run_bg",
-            kwargs={
-                "cmd": cmd,
-                "shell": shell,
-                "timeout": timeout,
-            },
-        )
-
-    if agents_nats:
     nats_data = {
         "func": "rawcmd",
         "timeout": timeout,
@@ -38,28 +22,10 @@ def handle_bulk_command_task(agentpks, cmd, shell, timeout):
 
 
 @app.task
-def handle_bulk_script_task(scriptpk, agentpks, args, timeout):
+def handle_bulk_script_task(scriptpk, agentpks, args, timeout) -> None:
     script = Script.objects.get(pk=scriptpk)
     agents = Agent.objects.filter(pk__in=agentpks)
 
     agents_nats = [agent for agent in agents if agent.has_nats]
-    agents_salt = [agent for agent in agents if not agent.has_nats]
-    minions = [agent.salt_id for agent in agents_salt]
-
-    if minions:
-        Agent.salt_batch_async(
-            minions=minions,
-            func="win_agent.run_script",
-            kwargs={
-                "filepath": script.filepath,
-                "filename": script.filename,
-                "shell": script.shell,
-                "timeout": timeout,
-                "args": args,
-                "bg": True if script.shell == "python" else False,  # salt bg script bug
-            },
-        )
-
     nats_data = {
         "func": "runscript",
         "timeout": timeout,
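With the salt branches removed, both bulk tasks now just build a nats_data dict and hand it to the NATS path. A hedged sketch of serializing such a payload; the payload fields beyond "func"/"timeout" and the publish step are assumptions, not taken from this diff (msgpack is pinned in requirements.txt above):

    import msgpack

    nats_data = {
        "func": "rawcmd",
        "timeout": 30,
        "payload": {"command": "whoami", "shell": "cmd"},  # hypothetical fields
    }

    # bytes ready for a NATS client, e.g. `await nc.publish(subject, packed)`
    packed = msgpack.packb(nats_data)
    print(len(packed), "bytes")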
@@ -1,10 +1,11 @@
 import json
 import os
+from django.core.files.uploadedfile import SimpleUploadedFile
 from pathlib import Path
 from django.conf import settings
 from tacticalrmm.test import TacticalTestCase
 from model_bakery import baker
-from .serializers import ScriptSerializer
+from .serializers import ScriptSerializer, ScriptTableSerializer
 from .models import Script
@@ -16,16 +17,50 @@ class TestScriptViews(TacticalTestCase):
         url = "/scripts/scripts/"
         scripts = baker.make("scripts.Script", _quantity=3)
 
-        serializer = ScriptSerializer(scripts, many=True)
+        serializer = ScriptTableSerializer(scripts, many=True)
         resp = self.client.get(url, format="json")
         self.assertEqual(resp.status_code, 200)
         self.assertEqual(serializer.data, resp.data)
 
         self.check_not_authenticated("get", url)
 
-    # TODO Need to test file uploads and saves
     def test_add_script(self):
-        pass
+        url = "/scripts/scripts/"
+
+        data = {
+            "name": "Name",
+            "description": "Description",
+            "shell": "powershell",
+            "category": "New",
+            "code": "Some Test Code\nnew Line",
+        }
+
+        # test without file upload
+        resp = self.client.post(url, data)
+        self.assertEqual(resp.status_code, 200)
+        self.assertTrue(Script.objects.filter(name="Name").exists())
+        self.assertEqual(Script.objects.get(name="Name").code, data["code"])
+
+        # file with 'Test' as content
+        file = SimpleUploadedFile(
+            "test_script.bat", b"\x54\x65\x73\x74", content_type="text/plain"
+        )
+        data = {
+            "name": "New Name",
+            "description": "Description",
+            "shell": "cmd",
+            "category": "New",
+            "filename": file,
+        }
+
+        # test with file upload
+        resp = self.client.post(url, data, format="multipart")
+        self.assertEqual(resp.status_code, 200)
+        script = Script.objects.filter(name="New Name").first()
+        self.assertEquals(script.code, "Test")
+
+        self.check_not_authenticated("post", url)
 
     def test_modify_script(self):
         # test a call where script doesn't exist
@@ -40,23 +75,39 @@ class TestScriptViews(TacticalTestCase):
             "name": script.name,
             "description": "Description Change",
             "shell": script.shell,
+            "code": "Test Code\nAnother Line",
         }
 
         # test edit a userdefined script
         resp = self.client.put(url, data, format="json")
         self.assertEqual(resp.status_code, 200)
-        self.assertEquals(
-            Script.objects.get(pk=script.pk).description, "Description Change"
-        )
+        script = Script.objects.get(pk=script.pk)
+        self.assertEquals(script.description, "Description Change")
+        self.assertEquals(script.code, "Test Code\nAnother Line")
 
         # test edit a builtin script
-        builtin_script = baker.make_recipe("scripts.builtin_script")
+        data = {"name": "New Name", "description": "New Desc", "code": "Some New Code"}
+        builtin_script = baker.make_recipe("scripts.script", script_type="builtin")
+
         resp = self.client.put(
             f"/scripts/{builtin_script.pk}/script/", data, format="json"
         )
         self.assertEqual(resp.status_code, 400)
 
-        # TODO Test changing script file
+        data = {
+            "name": script.name,
+            "description": "Description Change",
+            "shell": script.shell,
+            "favorite": True,
+            "code": "Test Code\nAnother Line",
+        }
+
+        # test marking a builtin script as favorite
+        resp = self.client.put(
+            f"/scripts/{builtin_script.pk}/script/", data, format="json"
+        )
+        self.assertEqual(resp.status_code, 200)
+        self.assertTrue(Script.objects.get(pk=builtin_script.pk).favorite)
 
         self.check_not_authenticated("put", url)
@@ -79,6 +130,7 @@ class TestScriptViews(TacticalTestCase):
         resp = self.client.delete("/scripts/500/script/", format="json")
         self.assertEqual(resp.status_code, 404)
 
+        # test delete script
         script = baker.make_recipe("scripts.script")
         url = f"/scripts/{script.pk}/script/"
         resp = self.client.delete(url, format="json")
@@ -86,13 +138,50 @@ class TestScriptViews(TacticalTestCase):
 
         self.assertFalse(Script.objects.filter(pk=script.pk).exists())
 
+        # test delete community script
+        script = baker.make_recipe("scripts.script", script_type="builtin")
+        url = f"/scripts/{script.pk}/script/"
+        resp = self.client.delete(url, format="json")
+        self.assertEqual(resp.status_code, 400)
+
         self.check_not_authenticated("delete", url)
 
-    # TODO Need to mock file open
     def test_download_script(self):
-        pass
+        # test a call where script doesn't exist
+        resp = self.client.get("/scripts/500/download/", format="json")
+        self.assertEqual(resp.status_code, 404)
 
-    def test_load_community_scripts(self):
+        # the script code property should decode to "Test"
+
+        # test powershell file
+        script = baker.make(
+            "scripts.Script", code_base64="VGVzdA==", shell="powershell"
+        )
+        url = f"/scripts/{script.pk}/download/"
+
+        resp = self.client.get(url, format="json")
+        self.assertEqual(resp.status_code, 200)
+        self.assertEqual(resp.data, {"filename": f"{script.name}.ps1", "code": "Test"})
+
+        # test batch file
+        script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="cmd")
+        url = f"/scripts/{script.pk}/download/"
+
+        resp = self.client.get(url, format="json")
+        self.assertEqual(resp.status_code, 200)
+        self.assertEqual(resp.data, {"filename": f"{script.name}.bat", "code": "Test"})
+
+        # test python file
+        script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="python")
+        url = f"/scripts/{script.pk}/download/"
+
+        resp = self.client.get(url, format="json")
+        self.assertEqual(resp.status_code, 200)
+        self.assertEqual(resp.data, {"filename": f"{script.name}.py", "code": "Test"})
+
+        self.check_not_authenticated("get", url)
+
+    def test_community_script_json_file(self):
         valid_shells = ["powershell", "python", "cmd"]
 
         if not settings.DOCKER_BUILD:
@@ -113,5 +202,19 @@ class TestScriptViews(TacticalTestCase):
         self.assertTrue(script["name"])
         self.assertTrue(script["description"])
         self.assertTrue(script["shell"])
-        self.assertTrue(script["description"])
         self.assertIn(script["shell"], valid_shells)
+
+    def test_load_community_scripts(self):
+        with open(
+            os.path.join(settings.BASE_DIR, "scripts/community_scripts.json")
+        ) as f:
+            info = json.load(f)
+
+        Script.load_community_scripts()
+
+        community_scripts = Script.objects.filter(script_type="builtin").count()
+        self.assertEqual(len(info), community_scripts)
+
+        # test updating already added community scripts
+        Script.load_community_scripts()
+        self.assertEqual(len(info), community_scripts)
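A decoding note on the fixtures used above: b"\x54\x65\x73\x74" and "VGVzdA==" both spell "Test", which is why the upload and download assertions line up. A two-line check:

    import base64

    assert b"\x54\x65\x73\x74" == b"Test"
    assert base64.b64encode(b"Test").decode("ascii") == "VGVzdA=="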
@@ -1,4 +1,4 @@
-import os
+import base64
 from loguru import logger
 
 from django.shortcuts import get_object_or_404
@@ -11,9 +11,10 @@ from rest_framework.response import Response
 from rest_framework.parsers import FileUploadParser
 
 from .models import Script
-from .serializers import ScriptSerializer
+from .serializers import ScriptSerializer, ScriptTableSerializer
 from tacticalrmm.utils import notify_error
 
 
 logger.configure(**settings.LOG_CONFIG)
@@ -22,74 +23,65 @@ class GetAddScripts(APIView):
 
     def get(self, request):
         scripts = Script.objects.all()
-        return Response(ScriptSerializer(scripts, many=True).data)
+        return Response(ScriptTableSerializer(scripts, many=True).data)
 
-    def put(self, request, format=None):
-        file_obj = request.data["filename"]  # the actual file in_memory object
-
-        # need to manually create the serialized data
-        # since javascript formData doesn't support JSON
-        filename = str(file_obj)
+    def post(self, request, format=None):
         data = {
             "name": request.data["name"],
-            "filename": filename,
+            "category": request.data["category"],
             "description": request.data["description"],
             "shell": request.data["shell"],
             "script_type": "userdefined",  # force all uploads to be userdefined. built in scripts cannot be edited by user
         }
 
+        if "favorite" in request.data:
+            data["favorite"] = request.data["favorite"]
+
+        if "filename" in request.data:
+            message_bytes = request.data["filename"].read()
+            data["code_base64"] = base64.b64encode(message_bytes).decode(
+                "ascii", "ignore"
+            )
+
+        elif "code" in request.data:
+            message_bytes = request.data["code"].encode("ascii", "ignore")
+            data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
+
         serializer = ScriptSerializer(data=data, partial=True)
         serializer.is_valid(raise_exception=True)
         obj = serializer.save()
 
-        with open(obj.file, "wb+") as f:
-            for chunk in file_obj.chunks():
-                f.write(chunk)
-
         return Response(f"{obj.name} was added!")
 
 
 class GetUpdateDeleteScript(APIView):
-    parser_class = (FileUploadParser,)
-
     def get(self, request, pk):
         script = get_object_or_404(Script, pk=pk)
         return Response(ScriptSerializer(script).data)
 
-    def put(self, request, pk, format=None):
+    def put(self, request, pk):
         script = get_object_or_404(Script, pk=pk)
 
-        # this will never trigger but check anyway
+        data = request.data
+
         if script.script_type == "builtin":
-            return notify_error("Built in scripts cannot be edited")
-
-        data = {
-            "name": request.data["name"],
-            "description": request.data["description"],
-            "shell": request.data["shell"],
-        }
-
-        # if uploading a new version of the script
-        if "filename" in request.data:
-            file_obj = request.data["filename"]
-            data["filename"] = str(file_obj)
+            # allow only favoriting builtin scripts
+            if "favorite" in data:
+                # overwrite request data
+                data = {"favorite": data["favorite"]}
+            else:
+                return notify_error("Community scripts cannot be edited.")
+
+        elif "code" in data:
+            message_bytes = data["code"].encode("ascii")
+            data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
+            data.pop("code")
 
         serializer = ScriptSerializer(data=data, instance=script, partial=True)
         serializer.is_valid(raise_exception=True)
         obj = serializer.save()
 
-        if "filename" in request.data:
-            try:
-                os.remove(obj.file)
-            except OSError:
-                pass
-
-            with open(obj.file, "wb+") as f:
-                for chunk in file_obj.chunks():
-                    f.write(chunk)
-
         return Response(f"{obj.name} was edited!")
 
     def delete(self, request, pk):
@@ -97,12 +89,7 @@ class GetUpdateDeleteScript(APIView):
 
         # this will never trigger but check anyway
         if script.script_type == "builtin":
-            return notify_error("Built in scripts cannot be deleted")
-
-        try:
-            os.remove(script.file)
-        except OSError:
-            pass
+            return notify_error("Community scripts cannot be deleted")
 
         script.delete()
         return Response(f"{script.name} was deleted!")
@@ -111,33 +98,12 @@ class GetUpdateDeleteScript(APIView):
 @api_view()
 def download(request, pk):
     script = get_object_or_404(Script, pk=pk)
-    use_nginx = False
-    conf = "/etc/nginx/sites-available/rmm.conf"
 
-    if os.path.exists(conf):
-        try:
-            with open(conf) as f:
-                for line in f.readlines():
-                    if "location" and "builtin" in line:
-                        use_nginx = True
-                        break
-        except Exception as e:
-            logger.error(e)
-    else:
-        use_nginx = True
+    if script.shell == "powershell":
+        filename = f"{script.name}.ps1"
+    elif script.shell == "cmd":
+        filename = f"{script.name}.bat"
+    else:
+        filename = f"{script.name}.py"
 
-    if settings.DEBUG or not use_nginx:
-        with open(script.file, "rb") as f:
-            response = HttpResponse(f.read(), content_type="text/plain")
-            response["Content-Disposition"] = f"attachment; filename={script.filename}"
-            return response
-    else:
-        response = HttpResponse()
-        response["Content-Disposition"] = f"attachment; filename={script.filename}"
-
-        response["X-Accel-Redirect"] = (
-            f"/saltscripts/{script.filename}"
-            if script.script_type == "userdefined"
-            else f"/builtin/{script.filename}"
-        )
-        return response
+    return Response({"filename": filename, "code": script.code})
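The rewritten download view returns JSON ({"filename": ..., "code": ...}) instead of streaming the file through nginx's X-Accel-Redirect, so the client writes the file itself. A hedged consumer sketch; host and token are placeholders, and the URL shape comes from the tests above:

    import requests

    resp = requests.get(
        "https://api.example.com/scripts/1/download/",  # hypothetical host
        headers={"Authorization": "Token <api-token>"},  # knox-style token, assumed
    )
    resp.raise_for_status()
    payload = resp.json()
    with open(payload["filename"], "w", newline="\n") as f:
        f.write(payload["code"])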
@@ -1343,10 +1343,5 @@
     "name": "tacticalagent",
     "description": "Tactical RMM Monitoring Agent",
     "display_name": "Tactical RMM Agent"
-  },
-  {
-    "name": "checkrunner",
-    "description": "Tactical Agent Background Check Runner",
-    "display_name": "Tactical Agent Check Runner"
   }
 ]
(Some file diffs were suppressed because they were too large, and some files were not shown because too many files changed in this diff.)