Compare commits
152 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9ab915a08b | ||
|
|
e26fbf0328 | ||
|
|
d9a52c4a2a | ||
|
|
7b2ec90de9 | ||
|
|
d310bf8bbf | ||
|
|
2abc6cc939 | ||
|
|
56d4e694a2 | ||
|
|
5f002c9cdc | ||
|
|
759daf4b4a | ||
|
|
3a8d9568e3 | ||
|
|
ff22a9d94a | ||
|
|
a6e42d5374 | ||
|
|
a2f74e0488 | ||
|
|
ee44240569 | ||
|
|
d0828744a2 | ||
|
|
6e2e576b29 | ||
|
|
bf61e27f8a | ||
|
|
c441c30b46 | ||
|
|
0e741230ea | ||
|
|
1bfe9ac2db | ||
|
|
6812e72348 | ||
|
|
b6449d2f5b | ||
|
|
7e3ea20dce | ||
|
|
c9d6fe9dcd | ||
|
|
4a649a6b8b | ||
|
|
8fef184963 | ||
|
|
69583ca3c0 | ||
|
|
6038a68e91 | ||
|
|
fa8bd8db87 | ||
|
|
18b4f0ed0f | ||
|
|
461f9d66c9 | ||
|
|
2155103c7a | ||
|
|
c9a6839c45 | ||
|
|
9fbe331a80 | ||
|
|
a56389c4ce | ||
|
|
64656784cb | ||
|
|
6eff2c181e | ||
|
|
1aa48c6d62 | ||
|
|
c7ca1a346d | ||
|
|
fa0ec7b502 | ||
|
|
768438c136 | ||
|
|
9badea0b3c | ||
|
|
43263a1650 | ||
|
|
821e02dc75 | ||
|
|
ed011ecf28 | ||
|
|
d861de4c2f | ||
|
|
3a3b2449dc | ||
|
|
d2614406ca | ||
|
|
0798d098ae | ||
|
|
dab7ddc2bb | ||
|
|
081a96e281 | ||
|
|
a7dd881d79 | ||
|
|
8134d5e24d | ||
|
|
ba6756cd45 | ||
|
|
5d8fce21ac | ||
|
|
e7e4a5bcd4 | ||
|
|
55f33357ea | ||
|
|
90568bba31 | ||
|
|
5d6e2dc2e4 | ||
|
|
6bb33f2559 | ||
|
|
ced92554ed | ||
|
|
dff3383158 | ||
|
|
bf03c89cb2 | ||
|
|
9f1484bbef | ||
|
|
3899680e26 | ||
|
|
6bb2eb25a1 | ||
|
|
f8dfd8edb3 | ||
|
|
042be624a3 | ||
|
|
6bafa4c79a | ||
|
|
58b42fac5c | ||
|
|
3b47b9558a | ||
|
|
ccf9636296 | ||
|
|
96942719f2 | ||
|
|
69cf1c1adc | ||
|
|
d77cba40b8 | ||
|
|
968735b555 | ||
|
|
ceed9d29eb | ||
|
|
41329039ee | ||
|
|
f68b102ca8 | ||
|
|
fa36e54298 | ||
|
|
b689f57435 | ||
|
|
885fa0ff56 | ||
|
|
303acb72a3 | ||
|
|
b2a46cd0cd | ||
|
|
5a5ecb3ee3 | ||
|
|
60b4ab6a63 | ||
|
|
e4b096a08f | ||
|
|
343f55049b | ||
|
|
6b46025261 | ||
|
|
5ea503f23e | ||
|
|
ce95f9ac23 | ||
|
|
c3fb87501b | ||
|
|
dc6a343612 | ||
|
|
3a61053957 | ||
|
|
570129e4d4 | ||
|
|
3315c7045f | ||
|
|
5ae50e242c | ||
|
|
bbcf449719 | ||
|
|
aab10f7184 | ||
|
|
8d43488cb8 | ||
|
|
0a9c647e19 | ||
|
|
40db5d4aa8 | ||
|
|
9254532baa | ||
|
|
7abed47cf0 | ||
|
|
5c6ac758f7 | ||
|
|
007677962c | ||
|
|
9c4aeab64a | ||
|
|
48e6fc0efe | ||
|
|
c8be713d11 | ||
|
|
ae887c8648 | ||
|
|
5daac2531b | ||
|
|
68def00327 | ||
|
|
67e7976710 | ||
|
|
35747e937e | ||
|
|
fb439787a4 | ||
|
|
8fa368f473 | ||
|
|
c84a9d07b1 | ||
|
|
7fb46cdfc4 | ||
|
|
52985e5ddc | ||
|
|
e880935dc3 | ||
|
|
cc22b1bca5 | ||
|
|
49a5128918 | ||
|
|
fedc7dcb44 | ||
|
|
cd32b20215 | ||
|
|
15cd9832c4 | ||
|
|
f25d4e4553 | ||
|
|
12d1c82b63 | ||
|
|
aebe855078 | ||
|
|
3416a71ebd | ||
|
|
94b3fea528 | ||
|
|
ad1a9ecca1 | ||
|
|
715accfb8a | ||
|
|
a8e03c6138 | ||
|
|
f69446b648 | ||
|
|
eedfbe5846 | ||
|
|
153351cc9f | ||
|
|
1b1eec40a7 | ||
|
|
763877541a | ||
|
|
1fad7d72a2 | ||
|
|
51ea2ea879 | ||
|
|
d77a478bf0 | ||
|
|
e413c0264a | ||
|
|
f88e7f898c | ||
|
|
d07bd4a6db | ||
|
|
fb34c099d5 | ||
|
|
1d2ee56a15 | ||
|
|
86665f7f09 | ||
|
|
0d2b4af986 | ||
|
|
dc2b2eeb9f | ||
|
|
e5dbb66d53 | ||
|
|
3474b1c471 | ||
|
|
3886de5b7c |
27
.devcontainer/.env.example
Normal file
27
.devcontainer/.env.example
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
COMPOSE_PROJECT_NAME=trmm
|
||||||
|
|
||||||
|
IMAGE_REPO=tacticalrmm/
|
||||||
|
VERSION=latest
|
||||||
|
|
||||||
|
# tactical credentials (Used to login to dashboard)
|
||||||
|
TRMM_USER=tactical
|
||||||
|
TRMM_PASS=tactical
|
||||||
|
|
||||||
|
# dns settings
|
||||||
|
APP_HOST=rmm.example.com
|
||||||
|
API_HOST=api.example.com
|
||||||
|
MESH_HOST=mesh.example.com
|
||||||
|
|
||||||
|
# mesh settings
|
||||||
|
MESH_USER=tactical
|
||||||
|
MESH_PASS=tactical
|
||||||
|
MONGODB_USER=mongouser
|
||||||
|
MONGODB_PASSWORD=mongopass
|
||||||
|
|
||||||
|
# database settings
|
||||||
|
POSTGRES_USER=postgres
|
||||||
|
POSTGRES_PASS=postgrespass
|
||||||
|
|
||||||
|
# DEV SETTINGS
|
||||||
|
APP_PORT=8080
|
||||||
|
API_PORT=8000
|
||||||
28
.devcontainer/api.dockerfile
Normal file
28
.devcontainer/api.dockerfile
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
FROM python:3.8-slim
|
||||||
|
|
||||||
|
ENV TACTICAL_DIR /opt/tactical
|
||||||
|
ENV TACTICAL_GO_DIR /usr/local/rmmgo
|
||||||
|
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
|
||||||
|
ENV WORKSPACE_DIR /workspace
|
||||||
|
ENV TACTICAL_USER tactical
|
||||||
|
ENV VIRTUAL_ENV ${WORKSPACE_DIR}/api/tacticalrmm/env
|
||||||
|
ENV PYTHONDONTWRITEBYTECODE=1
|
||||||
|
ENV PYTHONUNBUFFERED=1
|
||||||
|
|
||||||
|
EXPOSE 8000
|
||||||
|
|
||||||
|
RUN groupadd -g 1000 tactical && \
|
||||||
|
useradd -u 1000 -g 1000 tactical
|
||||||
|
|
||||||
|
# Copy Go Files
|
||||||
|
COPY --from=golang:1.15 /usr/local/go ${TACTICAL_GO_DIR}/go
|
||||||
|
|
||||||
|
# Copy Dev python reqs
|
||||||
|
COPY ./requirements.txt /
|
||||||
|
|
||||||
|
# Copy Docker Entrypoint
|
||||||
|
COPY ./entrypoint.sh /
|
||||||
|
RUN chmod +x /entrypoint.sh
|
||||||
|
ENTRYPOINT ["/entrypoint.sh"]
|
||||||
|
|
||||||
|
WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm
|
||||||
19
.devcontainer/docker-compose.debug.yml
Normal file
19
.devcontainer/docker-compose.debug.yml
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
version: '3.4'
|
||||||
|
|
||||||
|
services:
|
||||||
|
api-dev:
|
||||||
|
image: api-dev
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: ./api.dockerfile
|
||||||
|
command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000 --nothreading --noreload"]
|
||||||
|
ports:
|
||||||
|
- 8000:8000
|
||||||
|
- 5678:5678
|
||||||
|
volumes:
|
||||||
|
- tactical-data-dev:/opt/tactical
|
||||||
|
- ..:/workspace:cached
|
||||||
|
networks:
|
||||||
|
dev:
|
||||||
|
aliases:
|
||||||
|
- tactical-backend
|
||||||
233
.devcontainer/docker-compose.yml
Normal file
233
.devcontainer/docker-compose.yml
Normal file
@@ -0,0 +1,233 @@
|
|||||||
|
version: '3.4'
|
||||||
|
|
||||||
|
services:
|
||||||
|
api-dev:
|
||||||
|
image: api-dev
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: ./api.dockerfile
|
||||||
|
command: ["tactical-api"]
|
||||||
|
ports:
|
||||||
|
- 8000:8000
|
||||||
|
volumes:
|
||||||
|
- tactical-data-dev:/opt/tactical
|
||||||
|
- ..:/workspace:cached
|
||||||
|
networks:
|
||||||
|
dev:
|
||||||
|
aliases:
|
||||||
|
- tactical-backend
|
||||||
|
|
||||||
|
app-dev:
|
||||||
|
image: node:12-alpine
|
||||||
|
ports:
|
||||||
|
- 8080:8080
|
||||||
|
command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port 8080"
|
||||||
|
working_dir: /workspace/web
|
||||||
|
volumes:
|
||||||
|
- ..:/workspace:cached
|
||||||
|
networks:
|
||||||
|
dev:
|
||||||
|
aliases:
|
||||||
|
- tactical-frontend
|
||||||
|
|
||||||
|
# salt master and api
|
||||||
|
salt-dev:
|
||||||
|
image: ${IMAGE_REPO}tactical-salt:${VERSION}
|
||||||
|
restart: always
|
||||||
|
volumes:
|
||||||
|
- tactical-data-dev:/opt/tactical
|
||||||
|
- salt-data-dev:/etc/salt
|
||||||
|
ports:
|
||||||
|
- "4505:4505"
|
||||||
|
- "4506:4506"
|
||||||
|
networks:
|
||||||
|
dev:
|
||||||
|
aliases:
|
||||||
|
- tactical-salt
|
||||||
|
|
||||||
|
# nats
|
||||||
|
nats-dev:
|
||||||
|
image: ${IMAGE_REPO}tactical-nats:${VERSION}
|
||||||
|
restart: always
|
||||||
|
ports:
|
||||||
|
- "4222:4222"
|
||||||
|
volumes:
|
||||||
|
- tactical-data-dev:/opt/tactical
|
||||||
|
networks:
|
||||||
|
dev:
|
||||||
|
aliases:
|
||||||
|
- ${API_HOST}
|
||||||
|
- tactical-nats
|
||||||
|
|
||||||
|
# meshcentral container
|
||||||
|
meshcentral-dev:
|
||||||
|
image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
|
||||||
|
restart: always
|
||||||
|
environment:
|
||||||
|
MESH_HOST: ${MESH_HOST}
|
||||||
|
MESH_USER: ${MESH_USER}
|
||||||
|
MESH_PASS: ${MESH_PASS}
|
||||||
|
MONGODB_USER: ${MONGODB_USER}
|
||||||
|
MONGODB_PASSWORD: ${MONGODB_PASSWORD}
|
||||||
|
NGINX_HOST_IP: 172.21.0.20
|
||||||
|
networks:
|
||||||
|
dev:
|
||||||
|
aliases:
|
||||||
|
- tactical-meshcentral
|
||||||
|
- ${MESH_HOST}
|
||||||
|
volumes:
|
||||||
|
- tactical-data-dev:/opt/tactical
|
||||||
|
- mesh-data-dev:/home/node/app/meshcentral-data
|
||||||
|
depends_on:
|
||||||
|
- mongodb-dev
|
||||||
|
|
||||||
|
# mongodb container for meshcentral
|
||||||
|
mongodb-dev:
|
||||||
|
image: mongo:4.4
|
||||||
|
restart: always
|
||||||
|
environment:
|
||||||
|
MONGO_INITDB_ROOT_USERNAME: ${MONGODB_USER}
|
||||||
|
MONGO_INITDB_ROOT_PASSWORD: ${MONGODB_PASSWORD}
|
||||||
|
MONGO_INITDB_DATABASE: meshcentral
|
||||||
|
networks:
|
||||||
|
dev:
|
||||||
|
aliases:
|
||||||
|
- tactical-mongodb
|
||||||
|
volumes:
|
||||||
|
- mongo-dev-data:/data/db
|
||||||
|
|
||||||
|
# postgres database for api service
|
||||||
|
postgres-dev:
|
||||||
|
image: postgres:13-alpine
|
||||||
|
restart: always
|
||||||
|
environment:
|
||||||
|
POSTGRES_DB: tacticalrmm
|
||||||
|
POSTGRES_USER: ${POSTGRES_USER}
|
||||||
|
POSTGRES_PASSWORD: ${POSTGRES_PASS}
|
||||||
|
volumes:
|
||||||
|
- postgres-data-dev:/var/lib/postgresql/data
|
||||||
|
networks:
|
||||||
|
dev:
|
||||||
|
aliases:
|
||||||
|
- tactical-postgres
|
||||||
|
|
||||||
|
# redis container for celery tasks
|
||||||
|
redis-dev:
|
||||||
|
restart: always
|
||||||
|
image: redis:6.0-alpine
|
||||||
|
networks:
|
||||||
|
dev:
|
||||||
|
aliases:
|
||||||
|
- tactical-redis
|
||||||
|
|
||||||
|
init-dev:
|
||||||
|
image: api-dev
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: ./api.dockerfile
|
||||||
|
restart: on-failure
|
||||||
|
command: ["tactical-init-dev"]
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: ${POSTGRES_USER}
|
||||||
|
POSTGRES_PASS: ${POSTGRES_PASS}
|
||||||
|
APP_HOST: ${APP_HOST}
|
||||||
|
API_HOST: ${API_HOST}
|
||||||
|
MESH_HOST: ${MESH_HOST}
|
||||||
|
MESH_USER: ${MESH_USER}
|
||||||
|
TRMM_USER: ${TRMM_USER}
|
||||||
|
TRMM_PASS: ${TRMM_PASS}
|
||||||
|
depends_on:
|
||||||
|
- postgres-dev
|
||||||
|
- meshcentral-dev
|
||||||
|
networks:
|
||||||
|
- dev
|
||||||
|
volumes:
|
||||||
|
- tactical-data-dev:/opt/tactical
|
||||||
|
- ..:/workspace:cached
|
||||||
|
|
||||||
|
# container for celery worker service
|
||||||
|
celery-dev:
|
||||||
|
image: api-dev
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: ./api.dockerfile
|
||||||
|
command: ["tactical-celery-dev"]
|
||||||
|
restart: always
|
||||||
|
networks:
|
||||||
|
- dev
|
||||||
|
volumes:
|
||||||
|
- tactical-data-dev:/opt/tactical
|
||||||
|
- ..:/workspace:cached
|
||||||
|
depends_on:
|
||||||
|
- postgres-dev
|
||||||
|
- redis-dev
|
||||||
|
|
||||||
|
# container for celery beat service
|
||||||
|
celerybeat-dev:
|
||||||
|
image: api-dev
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: ./api.dockerfile
|
||||||
|
command: ["tactical-celerybeat-dev"]
|
||||||
|
restart: always
|
||||||
|
networks:
|
||||||
|
- dev
|
||||||
|
volumes:
|
||||||
|
- tactical-data-dev:/opt/tactical
|
||||||
|
- ..:/workspace:cached
|
||||||
|
depends_on:
|
||||||
|
- postgres-dev
|
||||||
|
- redis-dev
|
||||||
|
|
||||||
|
# container for celery winupdate tasks
|
||||||
|
celerywinupdate-dev:
|
||||||
|
image: api-dev
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: ./api.dockerfile
|
||||||
|
command: ["tactical-celerywinupdate-dev"]
|
||||||
|
restart: always
|
||||||
|
networks:
|
||||||
|
- dev
|
||||||
|
volumes:
|
||||||
|
- tactical-data-dev:/opt/tactical
|
||||||
|
- ..:/workspace:cached
|
||||||
|
depends_on:
|
||||||
|
- postgres-dev
|
||||||
|
- redis-dev
|
||||||
|
|
||||||
|
nginx-dev:
|
||||||
|
# container for tactical reverse proxy
|
||||||
|
image: ${IMAGE_REPO}tactical-nginx:${VERSION}
|
||||||
|
restart: always
|
||||||
|
environment:
|
||||||
|
APP_HOST: ${APP_HOST}
|
||||||
|
API_HOST: ${API_HOST}
|
||||||
|
MESH_HOST: ${MESH_HOST}
|
||||||
|
CERT_PUB_KEY: ${CERT_PUB_KEY}
|
||||||
|
CERT_PRIV_KEY: ${CERT_PRIV_KEY}
|
||||||
|
APP_PORT: 8080
|
||||||
|
API_PORT: 8000
|
||||||
|
networks:
|
||||||
|
dev:
|
||||||
|
ipv4_address: 172.21.0.20
|
||||||
|
ports:
|
||||||
|
- "80:80"
|
||||||
|
- "443:443"
|
||||||
|
volumes:
|
||||||
|
- tactical-data-dev:/opt/tactical
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
tactical-data-dev:
|
||||||
|
postgres-data-dev:
|
||||||
|
mongo-dev-data:
|
||||||
|
mesh-data-dev:
|
||||||
|
salt-data-dev:
|
||||||
|
|
||||||
|
networks:
|
||||||
|
dev:
|
||||||
|
driver: bridge
|
||||||
|
ipam:
|
||||||
|
driver: default
|
||||||
|
config:
|
||||||
|
- subnet: 172.21.0.0/24
|
||||||
182
.devcontainer/entrypoint.sh
Normal file
182
.devcontainer/entrypoint.sh
Normal file
@@ -0,0 +1,182 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
: "${TRMM_USER:=tactical}"
|
||||||
|
: "${TRMM_PASS:=tactical}"
|
||||||
|
: "${POSTGRES_HOST:=tactical-postgres}"
|
||||||
|
: "${POSTGRES_PORT:=5432}"
|
||||||
|
: "${POSTGRES_USER:=tactical}"
|
||||||
|
: "${POSTGRES_PASS:=tactical}"
|
||||||
|
: "${POSTGRES_DB:=tacticalrmm}"
|
||||||
|
: "${SALT_HOST:=tactical-salt}"
|
||||||
|
: "${SALT_USER:=saltapi}"
|
||||||
|
: "${MESH_CONTAINER:=tactical-meshcentral}"
|
||||||
|
: "${MESH_USER:=meshcentral}"
|
||||||
|
: "${MESH_PASS:=meshcentralpass}"
|
||||||
|
: "${MESH_HOST:=tactical-meshcentral}"
|
||||||
|
: "${API_HOST:=tactical-backend}"
|
||||||
|
: "${APP_HOST:=tactical-frontend}"
|
||||||
|
: "${REDIS_HOST:=tactical-redis}"
|
||||||
|
|
||||||
|
# Add python venv to path
|
||||||
|
export PATH="${VIRTUAL_ENV}/bin:$PATH"
|
||||||
|
|
||||||
|
function check_tactical_ready {
|
||||||
|
sleep 15
|
||||||
|
until [ -f "${TACTICAL_READY_FILE}" ]; do
|
||||||
|
echo "waiting for init container to finish install or update..."
|
||||||
|
sleep 10
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
function django_setup {
|
||||||
|
until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
|
||||||
|
echo "waiting for postgresql container to be ready..."
|
||||||
|
sleep 5
|
||||||
|
done
|
||||||
|
|
||||||
|
until (echo > /dev/tcp/"${MESH_CONTAINER}"/443) &> /dev/null; do
|
||||||
|
echo "waiting for meshcentral container to be ready..."
|
||||||
|
sleep 5
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "setting up django environment"
|
||||||
|
|
||||||
|
# configure django settings
|
||||||
|
MESH_TOKEN=$(cat ${TACTICAL_DIR}/tmp/mesh_token)
|
||||||
|
|
||||||
|
DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
|
||||||
|
|
||||||
|
# write salt pass to tmp dir
|
||||||
|
if [ ! -f "${TACTICAL__DIR}/tmp/salt_pass" ]; then
|
||||||
|
SALT_PASS=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 20 | head -n 1)
|
||||||
|
echo "${SALT_PASS}" > ${TACTICAL_DIR}/tmp/salt_pass
|
||||||
|
else
|
||||||
|
SALT_PASS=$(cat ${TACTICAL_DIR}/tmp/salt_pass)
|
||||||
|
fi
|
||||||
|
|
||||||
|
localvars="$(cat << EOF
|
||||||
|
SECRET_KEY = '${DJANGO_SEKRET}'
|
||||||
|
|
||||||
|
DEBUG = True
|
||||||
|
|
||||||
|
DOCKER_BUILD = True
|
||||||
|
|
||||||
|
CERT_FILE = '/opt/tactical/certs/fullchain.pem'
|
||||||
|
KEY_FILE = '/opt/tactical/certs/privkey.pem'
|
||||||
|
|
||||||
|
SCRIPTS_DIR = '${WORKSPACE_DIR}/scripts'
|
||||||
|
|
||||||
|
ALLOWED_HOSTS = ['${API_HOST}', 'localhost', '127.0.0.1']
|
||||||
|
|
||||||
|
ADMIN_URL = 'admin/'
|
||||||
|
|
||||||
|
CORS_ORIGIN_ALLOW_ALL = True
|
||||||
|
|
||||||
|
DATABASES = {
|
||||||
|
'default': {
|
||||||
|
'ENGINE': 'django.db.backends.postgresql',
|
||||||
|
'NAME': '${POSTGRES_DB}',
|
||||||
|
'USER': '${POSTGRES_USER}',
|
||||||
|
'PASSWORD': '${POSTGRES_PASS}',
|
||||||
|
'HOST': '${POSTGRES_HOST}',
|
||||||
|
'PORT': '${POSTGRES_PORT}',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
REST_FRAMEWORK = {
|
||||||
|
'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',
|
||||||
|
|
||||||
|
'DEFAULT_PERMISSION_CLASSES': (
|
||||||
|
'rest_framework.permissions.IsAuthenticated',
|
||||||
|
),
|
||||||
|
'DEFAULT_AUTHENTICATION_CLASSES': (
|
||||||
|
'knox.auth.TokenAuthentication',
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
|
if not DEBUG:
|
||||||
|
REST_FRAMEWORK.update({
|
||||||
|
'DEFAULT_RENDERER_CLASSES': (
|
||||||
|
'rest_framework.renderers.JSONRenderer',
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
SALT_USERNAME = '${SALT_USER}'
|
||||||
|
SALT_PASSWORD = '${SALT_PASS}'
|
||||||
|
SALT_HOST = '${SALT_HOST}'
|
||||||
|
MESH_USERNAME = '${MESH_USER}'
|
||||||
|
MESH_SITE = 'https://${MESH_HOST}'
|
||||||
|
MESH_TOKEN_KEY = '${MESH_TOKEN}'
|
||||||
|
REDIS_HOST = '${REDIS_HOST}'
|
||||||
|
EOF
|
||||||
|
)"
|
||||||
|
|
||||||
|
echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
|
||||||
|
|
||||||
|
# run migrations and init scripts
|
||||||
|
python manage.py migrate --no-input
|
||||||
|
python manage.py collectstatic --no-input
|
||||||
|
python manage.py initial_db_setup
|
||||||
|
python manage.py initial_mesh_setup
|
||||||
|
python manage.py load_chocos
|
||||||
|
python manage.py load_community_scripts
|
||||||
|
python manage.py reload_nats
|
||||||
|
|
||||||
|
# create super user
|
||||||
|
echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
if [ "$1" = 'tactical-init-dev' ]; then
|
||||||
|
|
||||||
|
# make directories if they don't exist
|
||||||
|
mkdir -p ${TACTICAL_DIR}/tmp
|
||||||
|
|
||||||
|
test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"
|
||||||
|
|
||||||
|
# setup Python virtual env and install dependencies
|
||||||
|
test -f ${VIRTUAL_ENV} && python -m venv --copies ${VIRTUAL_ENV}
|
||||||
|
pip install --no-cache-dir -r /requirements.txt
|
||||||
|
|
||||||
|
django_setup
|
||||||
|
|
||||||
|
# create .env file for frontend
|
||||||
|
webenv="$(cat << EOF
|
||||||
|
PROD_URL = "http://${API_HOST}:8000"
|
||||||
|
DEV_URL = "http://${API_HOST}:8000"
|
||||||
|
DEV_HOST = 0.0.0.0
|
||||||
|
DEV_PORT = 8080
|
||||||
|
EOF
|
||||||
|
)"
|
||||||
|
echo "${webenv}" | tee ${WORKSPACE_DIR}/web/.env > /dev/null
|
||||||
|
|
||||||
|
# chown everything to tactical user
|
||||||
|
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
|
||||||
|
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${TACTICAL_DIR}"
|
||||||
|
|
||||||
|
# create install ready file
|
||||||
|
su -c "echo 'tactical-init' > ${TACTICAL_READY_FILE}" "${TACTICAL_USER}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$1" = 'tactical-api' ]; then
|
||||||
|
check_tactical_ready
|
||||||
|
python manage.py runserver 0.0.0.0:8000
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$1" = 'tactical-celery-dev' ]; then
|
||||||
|
check_tactical_ready
|
||||||
|
celery -A tacticalrmm worker -l debug
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$1" = 'tactical-celerybeat-dev' ]; then
|
||||||
|
check_tactical_ready
|
||||||
|
test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
|
||||||
|
celery -A tacticalrmm beat -l debug
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$1" = 'tactical-celerywinupdate-dev' ]; then
|
||||||
|
check_tactical_ready
|
||||||
|
celery -A tacticalrmm worker -Q wupdate -l debug
|
||||||
|
fi
|
||||||
44
.devcontainer/requirements.txt
Normal file
44
.devcontainer/requirements.txt
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
|
||||||
|
amqp==2.6.1
|
||||||
|
asgiref==3.3.1
|
||||||
|
asyncio-nats-client==0.11.4
|
||||||
|
billiard==3.6.3.0
|
||||||
|
celery==4.4.6
|
||||||
|
certifi==2020.12.5
|
||||||
|
cffi==1.14.3
|
||||||
|
chardet==3.0.4
|
||||||
|
cryptography==3.2.1
|
||||||
|
decorator==4.4.2
|
||||||
|
Django==3.1.4
|
||||||
|
django-cors-headers==3.5.0
|
||||||
|
django-rest-knox==4.1.0
|
||||||
|
djangorestframework==3.12.2
|
||||||
|
future==0.18.2
|
||||||
|
idna==2.10
|
||||||
|
kombu==4.6.11
|
||||||
|
loguru==0.5.3
|
||||||
|
msgpack==1.0.0
|
||||||
|
packaging==20.4
|
||||||
|
psycopg2-binary==2.8.6
|
||||||
|
pycparser==2.20
|
||||||
|
pycryptodome==3.9.9
|
||||||
|
pyotp==2.4.1
|
||||||
|
pyparsing==2.4.7
|
||||||
|
pytz==2020.4
|
||||||
|
qrcode==6.1
|
||||||
|
redis==3.5.3
|
||||||
|
requests==2.25.0
|
||||||
|
six==1.15.0
|
||||||
|
sqlparse==0.4.1
|
||||||
|
twilio==6.49.0
|
||||||
|
urllib3==1.26.2
|
||||||
|
validators==0.18.1
|
||||||
|
vine==1.3.0
|
||||||
|
websockets==8.1
|
||||||
|
zipp==3.4.0
|
||||||
|
black
|
||||||
|
Werkzeug
|
||||||
|
django-extensions
|
||||||
|
coverage
|
||||||
|
coveralls
|
||||||
|
model_bakery
|
||||||
@@ -1,5 +1,25 @@
|
|||||||
.git
|
**/__pycache__
|
||||||
.cache
|
**/.classpath
|
||||||
**/*.env
|
**/.dockerignore
|
||||||
**/env
|
**/.env
|
||||||
|
**/.git
|
||||||
|
**/.gitignore
|
||||||
|
**/.project
|
||||||
|
**/.settings
|
||||||
|
**/.toolstarget
|
||||||
|
**/.vs
|
||||||
|
**/.vscode
|
||||||
|
**/*.*proj.user
|
||||||
|
**/*.dbmdl
|
||||||
|
**/*.jfm
|
||||||
|
**/azds.yaml
|
||||||
|
**/charts
|
||||||
|
**/docker-compose*
|
||||||
|
**/Dockerfile*
|
||||||
**/node_modules
|
**/node_modules
|
||||||
|
**/npm-debug.log
|
||||||
|
**/obj
|
||||||
|
**/secrets.dev.yaml
|
||||||
|
**/values.dev.yaml
|
||||||
|
**/env
|
||||||
|
README.md
|
||||||
|
|||||||
12
.github/FUNDING.yml
vendored
Normal file
12
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# These are supported funding model platforms
|
||||||
|
|
||||||
|
github: wh1te909
|
||||||
|
patreon: # Replace with a single Patreon username
|
||||||
|
open_collective: # Replace with a single Open Collective username
|
||||||
|
ko_fi: # Replace with a single Ko-fi username
|
||||||
|
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||||
|
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||||
|
liberapay: # Replace with a single Liberapay username
|
||||||
|
issuehunt: # Replace with a single IssueHunt username
|
||||||
|
otechie: # Replace with a single Otechie username
|
||||||
|
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
|
||||||
14
.vscode/launch.json
vendored
14
.vscode/launch.json
vendored
@@ -14,6 +14,20 @@
|
|||||||
"0.0.0.0:8000"
|
"0.0.0.0:8000"
|
||||||
],
|
],
|
||||||
"django": true
|
"django": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Django: Docker Remote Attach",
|
||||||
|
"type": "python",
|
||||||
|
"request": "attach",
|
||||||
|
"port": 5678,
|
||||||
|
"host": "localhost",
|
||||||
|
"preLaunchTask": "docker debug",
|
||||||
|
"pathMappings": [
|
||||||
|
{
|
||||||
|
"localRoot": "${workspaceFolder}/api/tacticalrmm",
|
||||||
|
"remoteRoot": "/workspace/api/tacticalrmm"
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
19
.vscode/settings.json
vendored
19
.vscode/settings.json
vendored
@@ -41,4 +41,23 @@
|
|||||||
"**/*.zip": true
|
"**/*.zip": true
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
"go.useLanguageServer": true,
|
||||||
|
"[go]": {
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.codeActionsOnSave": {
|
||||||
|
"source.organizeImports": false,
|
||||||
|
},
|
||||||
|
"editor.snippetSuggestions": "none",
|
||||||
|
},
|
||||||
|
"[go.mod]": {
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.codeActionsOnSave": {
|
||||||
|
"source.organizeImports": true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"gopls": {
|
||||||
|
"usePlaceholders": true,
|
||||||
|
"completeUnimported": true,
|
||||||
|
"staticcheck": true,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
23
.vscode/tasks.json
vendored
Normal file
23
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
// See https://go.microsoft.com/fwlink/?LinkId=733558
|
||||||
|
// for the documentation about the tasks.json format
|
||||||
|
"version": "2.0.0",
|
||||||
|
"tasks": [
|
||||||
|
{
|
||||||
|
"label": "docker debug",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "docker-compose",
|
||||||
|
"args": [
|
||||||
|
"-p",
|
||||||
|
"trmm",
|
||||||
|
"-f",
|
||||||
|
".devcontainer/docker-compose.yml",
|
||||||
|
"-f",
|
||||||
|
".devcontainer/docker-compose.debug.yml",
|
||||||
|
"up",
|
||||||
|
"-d",
|
||||||
|
"--build"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -36,7 +36,7 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
|
|||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
### Requirements
|
### Requirements
|
||||||
- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04)
|
- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04 / Debian 10)
|
||||||
- A domain you own with at least 3 subdomains
|
- A domain you own with at least 3 subdomains
|
||||||
- Google Authenticator app (2 factor is NOT optional)
|
- Google Authenticator app (2 factor is NOT optional)
|
||||||
|
|
||||||
|
|||||||
@@ -6,28 +6,28 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('accounts', '0002_auto_20200810_0544'),
|
("accounts", "0002_auto_20200810_0544"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='user',
|
model_name="user",
|
||||||
name='created_by',
|
name="created_by",
|
||||||
field=models.CharField(blank=True, max_length=100, null=True),
|
field=models.CharField(blank=True, max_length=100, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='user',
|
model_name="user",
|
||||||
name='created_time',
|
name="created_time",
|
||||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='user',
|
model_name="user",
|
||||||
name='modified_by',
|
name="modified_by",
|
||||||
field=models.CharField(blank=True, max_length=100, null=True),
|
field=models.CharField(blank=True, max_length=100, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='user',
|
model_name="user",
|
||||||
name='modified_time',
|
name="modified_time",
|
||||||
field=models.DateTimeField(auto_now=True, null=True),
|
field=models.DateTimeField(auto_now=True, null=True),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -6,24 +6,24 @@ from django.db import migrations
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('accounts', '0003_auto_20200922_1344'),
|
("accounts", "0003_auto_20200922_1344"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name='user',
|
model_name="user",
|
||||||
name='created_by',
|
name="created_by",
|
||||||
),
|
),
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name='user',
|
model_name="user",
|
||||||
name='created_time',
|
name="created_time",
|
||||||
),
|
),
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name='user',
|
model_name="user",
|
||||||
name='modified_by',
|
name="modified_by",
|
||||||
),
|
),
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name='user',
|
model_name="user",
|
||||||
name='modified_time',
|
name="modified_time",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -6,28 +6,28 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('accounts', '0004_auto_20201002_1257'),
|
("accounts", "0004_auto_20201002_1257"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='user',
|
model_name="user",
|
||||||
name='created_by',
|
name="created_by",
|
||||||
field=models.CharField(blank=True, max_length=100, null=True),
|
field=models.CharField(blank=True, max_length=100, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='user',
|
model_name="user",
|
||||||
name='created_time',
|
name="created_time",
|
||||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='user',
|
model_name="user",
|
||||||
name='modified_by',
|
name="modified_by",
|
||||||
field=models.CharField(blank=True, max_length=100, null=True),
|
field=models.CharField(blank=True, max_length=100, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='user',
|
model_name="user",
|
||||||
name='modified_time',
|
name="modified_time",
|
||||||
field=models.DateTimeField(auto_now=True, null=True),
|
field=models.DateTimeField(auto_now=True, null=True),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -6,13 +6,13 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('accounts', '0007_update_agent_primary_key'),
|
("accounts", "0007_update_agent_primary_key"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='user',
|
model_name="user",
|
||||||
name='dark_mode',
|
name="dark_mode",
|
||||||
field=models.BooleanField(default=True),
|
field=models.BooleanField(default=True),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2020-12-10 17:00
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("accounts", "0008_user_dark_mode"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="user",
|
||||||
|
name="show_community_scripts",
|
||||||
|
field=models.BooleanField(default=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,26 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-14 01:23
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("accounts", "0009_user_show_community_scripts"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="user",
|
||||||
|
name="agent_dblclick_action",
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("editagent", "Edit Agent"),
|
||||||
|
("takecontrol", "Take Control"),
|
||||||
|
("remotebg", "Remote Background"),
|
||||||
|
],
|
||||||
|
default="editagent",
|
||||||
|
max_length=50,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -3,11 +3,21 @@ from django.contrib.auth.models import AbstractUser
|
|||||||
|
|
||||||
from logs.models import BaseAuditModel
|
from logs.models import BaseAuditModel
|
||||||
|
|
||||||
|
AGENT_DBLCLICK_CHOICES = [
|
||||||
|
("editagent", "Edit Agent"),
|
||||||
|
("takecontrol", "Take Control"),
|
||||||
|
("remotebg", "Remote Background"),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class User(AbstractUser, BaseAuditModel):
|
class User(AbstractUser, BaseAuditModel):
|
||||||
is_active = models.BooleanField(default=True)
|
is_active = models.BooleanField(default=True)
|
||||||
totp_key = models.CharField(max_length=50, null=True, blank=True)
|
totp_key = models.CharField(max_length=50, null=True, blank=True)
|
||||||
dark_mode = models.BooleanField(default=True)
|
dark_mode = models.BooleanField(default=True)
|
||||||
|
show_community_scripts = models.BooleanField(default=True)
|
||||||
|
agent_dblclick_action = models.CharField(
|
||||||
|
max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent"
|
||||||
|
)
|
||||||
|
|
||||||
agent = models.OneToOneField(
|
agent = models.OneToOneField(
|
||||||
"agents.Agent",
|
"agents.Agent",
|
||||||
|
|||||||
@@ -155,6 +155,33 @@ class GetUpdateDeleteUser(TacticalTestCase):
|
|||||||
|
|
||||||
self.check_not_authenticated("put", url)
|
self.check_not_authenticated("put", url)
|
||||||
|
|
||||||
|
@override_settings(ROOT_USER="john")
|
||||||
|
def test_put_root_user(self):
|
||||||
|
url = f"/accounts/{self.john.pk}/users/"
|
||||||
|
data = {
|
||||||
|
"id": self.john.pk,
|
||||||
|
"username": "john",
|
||||||
|
"email": "johndoe@xlawgaming.com",
|
||||||
|
"first_name": "John",
|
||||||
|
"last_name": "Doe",
|
||||||
|
}
|
||||||
|
r = self.client.put(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
@override_settings(ROOT_USER="john")
|
||||||
|
def test_put_not_root_user(self):
|
||||||
|
url = f"/accounts/{self.john.pk}/users/"
|
||||||
|
data = {
|
||||||
|
"id": self.john.pk,
|
||||||
|
"username": "john",
|
||||||
|
"email": "johndoe@xlawgaming.com",
|
||||||
|
"first_name": "John",
|
||||||
|
"last_name": "Doe",
|
||||||
|
}
|
||||||
|
self.client.force_authenticate(user=self.alice)
|
||||||
|
r = self.client.put(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 400)
|
||||||
|
|
||||||
def test_delete(self):
|
def test_delete(self):
|
||||||
url = f"/accounts/{self.john.pk}/users/"
|
url = f"/accounts/{self.john.pk}/users/"
|
||||||
r = self.client.delete(url)
|
r = self.client.delete(url)
|
||||||
@@ -166,6 +193,19 @@ class GetUpdateDeleteUser(TacticalTestCase):
|
|||||||
|
|
||||||
self.check_not_authenticated("delete", url)
|
self.check_not_authenticated("delete", url)
|
||||||
|
|
||||||
|
@override_settings(ROOT_USER="john")
|
||||||
|
def test_delete_root_user(self):
|
||||||
|
url = f"/accounts/{self.john.pk}/users/"
|
||||||
|
r = self.client.delete(url)
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
@override_settings(ROOT_USER="john")
|
||||||
|
def test_delete_non_root_user(self):
|
||||||
|
url = f"/accounts/{self.john.pk}/users/"
|
||||||
|
self.client.force_authenticate(user=self.alice)
|
||||||
|
r = self.client.delete(url)
|
||||||
|
self.assertEqual(r.status_code, 400)
|
||||||
|
|
||||||
|
|
||||||
class TestUserAction(TacticalTestCase):
|
class TestUserAction(TacticalTestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
@@ -184,6 +224,21 @@ class TestUserAction(TacticalTestCase):
|
|||||||
|
|
||||||
self.check_not_authenticated("post", url)
|
self.check_not_authenticated("post", url)
|
||||||
|
|
||||||
|
@override_settings(ROOT_USER="john")
|
||||||
|
def test_post_root_user(self):
|
||||||
|
url = "/accounts/users/reset/"
|
||||||
|
data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
|
||||||
|
r = self.client.post(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
@override_settings(ROOT_USER="john")
|
||||||
|
def test_post_non_root_user(self):
|
||||||
|
url = "/accounts/users/reset/"
|
||||||
|
data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
|
||||||
|
self.client.force_authenticate(user=self.alice)
|
||||||
|
r = self.client.post(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 400)
|
||||||
|
|
||||||
def test_put(self):
|
def test_put(self):
|
||||||
url = "/accounts/users/reset/"
|
url = "/accounts/users/reset/"
|
||||||
data = {"id": self.john.pk}
|
data = {"id": self.john.pk}
|
||||||
@@ -195,12 +250,46 @@ class TestUserAction(TacticalTestCase):
|
|||||||
|
|
||||||
self.check_not_authenticated("put", url)
|
self.check_not_authenticated("put", url)
|
||||||
|
|
||||||
def test_darkmode(self):
|
@override_settings(ROOT_USER="john")
|
||||||
|
def test_put_root_user(self):
|
||||||
|
url = "/accounts/users/reset/"
|
||||||
|
data = {"id": self.john.pk}
|
||||||
|
r = self.client.put(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
user = User.objects.get(pk=self.john.pk)
|
||||||
|
self.assertEqual(user.totp_key, "")
|
||||||
|
|
||||||
|
@override_settings(ROOT_USER="john")
|
||||||
|
def test_put_non_root_user(self):
|
||||||
|
url = "/accounts/users/reset/"
|
||||||
|
data = {"id": self.john.pk}
|
||||||
|
self.client.force_authenticate(user=self.alice)
|
||||||
|
r = self.client.put(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 400)
|
||||||
|
|
||||||
|
def test_user_ui(self):
|
||||||
url = "/accounts/users/ui/"
|
url = "/accounts/users/ui/"
|
||||||
data = {"dark_mode": False}
|
data = {"dark_mode": False}
|
||||||
r = self.client.patch(url, data, format="json")
|
r = self.client.patch(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
data = {"show_community_scripts": True}
|
||||||
|
r = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
data = {"agent_dblclick_action": "editagent"}
|
||||||
|
r = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
data = {"agent_dblclick_action": "remotebg"}
|
||||||
|
r = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
data = {"agent_dblclick_action": "takecontrol"}
|
||||||
|
r = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
self.check_not_authenticated("patch", url)
|
self.check_not_authenticated("patch", url)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -60,7 +60,7 @@ class LoginView(KnoxLoginView):
|
|||||||
|
|
||||||
if settings.DEBUG and token == "sekret":
|
if settings.DEBUG and token == "sekret":
|
||||||
valid = True
|
valid = True
|
||||||
elif totp.verify(token, valid_window=1):
|
elif totp.verify(token, valid_window=10):
|
||||||
valid = True
|
valid = True
|
||||||
|
|
||||||
if valid:
|
if valid:
|
||||||
@@ -108,6 +108,13 @@ class GetUpdateDeleteUser(APIView):
|
|||||||
def put(self, request, pk):
|
def put(self, request, pk):
|
||||||
user = get_object_or_404(User, pk=pk)
|
user = get_object_or_404(User, pk=pk)
|
||||||
|
|
||||||
|
if (
|
||||||
|
hasattr(settings, "ROOT_USER")
|
||||||
|
and request.user != user
|
||||||
|
and user.username == settings.ROOT_USER
|
||||||
|
):
|
||||||
|
return notify_error("The root user cannot be modified from the UI")
|
||||||
|
|
||||||
serializer = UserSerializer(instance=user, data=request.data, partial=True)
|
serializer = UserSerializer(instance=user, data=request.data, partial=True)
|
||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
serializer.save()
|
serializer.save()
|
||||||
@@ -115,7 +122,15 @@ class GetUpdateDeleteUser(APIView):
|
|||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
def delete(self, request, pk):
|
def delete(self, request, pk):
|
||||||
get_object_or_404(User, pk=pk).delete()
|
user = get_object_or_404(User, pk=pk)
|
||||||
|
if (
|
||||||
|
hasattr(settings, "ROOT_USER")
|
||||||
|
and request.user != user
|
||||||
|
and user.username == settings.ROOT_USER
|
||||||
|
):
|
||||||
|
return notify_error("The root user cannot be deleted from the UI")
|
||||||
|
|
||||||
|
user.delete()
|
||||||
|
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
@@ -124,8 +139,14 @@ class UserActions(APIView):
|
|||||||
|
|
||||||
# reset password
|
# reset password
|
||||||
def post(self, request):
|
def post(self, request):
|
||||||
|
|
||||||
user = get_object_or_404(User, pk=request.data["id"])
|
user = get_object_or_404(User, pk=request.data["id"])
|
||||||
|
if (
|
||||||
|
hasattr(settings, "ROOT_USER")
|
||||||
|
and request.user != user
|
||||||
|
and user.username == settings.ROOT_USER
|
||||||
|
):
|
||||||
|
return notify_error("The root user cannot be modified from the UI")
|
||||||
|
|
||||||
user.set_password(request.data["password"])
|
user.set_password(request.data["password"])
|
||||||
user.save()
|
user.save()
|
||||||
|
|
||||||
@@ -133,8 +154,14 @@ class UserActions(APIView):
|
|||||||
|
|
||||||
# reset two factor token
|
# reset two factor token
|
||||||
def put(self, request):
|
def put(self, request):
|
||||||
|
|
||||||
user = get_object_or_404(User, pk=request.data["id"])
|
user = get_object_or_404(User, pk=request.data["id"])
|
||||||
|
if (
|
||||||
|
hasattr(settings, "ROOT_USER")
|
||||||
|
and request.user != user
|
||||||
|
and user.username == settings.ROOT_USER
|
||||||
|
):
|
||||||
|
return notify_error("The root user cannot be modified from the UI")
|
||||||
|
|
||||||
user.totp_key = ""
|
user.totp_key = ""
|
||||||
user.save()
|
user.save()
|
||||||
|
|
||||||
@@ -161,6 +188,17 @@ class TOTPSetup(APIView):
|
|||||||
class UserUI(APIView):
|
class UserUI(APIView):
|
||||||
def patch(self, request):
|
def patch(self, request):
|
||||||
user = request.user
|
user = request.user
|
||||||
|
|
||||||
|
if "dark_mode" in request.data:
|
||||||
user.dark_mode = request.data["dark_mode"]
|
user.dark_mode = request.data["dark_mode"]
|
||||||
user.save(update_fields=["dark_mode"])
|
user.save(update_fields=["dark_mode"])
|
||||||
|
|
||||||
|
if "show_community_scripts" in request.data:
|
||||||
|
user.show_community_scripts = request.data["show_community_scripts"]
|
||||||
|
user.save(update_fields=["show_community_scripts"])
|
||||||
|
|
||||||
|
if "agent_dblclick_action" in request.data:
|
||||||
|
user.agent_dblclick_action = request.data["agent_dblclick_action"]
|
||||||
|
user.save(update_fields=["agent_dblclick_action"])
|
||||||
|
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
@@ -7,14 +7,20 @@ import django.db.models.deletion
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('clients', '0006_deployment'),
|
("clients", "0006_deployment"),
|
||||||
('agents', '0020_auto_20201025_2129'),
|
("agents", "0020_auto_20201025_2129"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='agent',
|
model_name="agent",
|
||||||
name='site_link',
|
name="site_link",
|
||||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='agents', to='clients.site'),
|
field=models.ForeignKey(
|
||||||
|
blank=True,
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
|
related_name="agents",
|
||||||
|
to="clients.site",
|
||||||
|
),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -6,16 +6,16 @@ from django.db import migrations
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('agents', '0022_update_site_primary_key'),
|
("agents", "0022_update_site_primary_key"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name='agent',
|
model_name="agent",
|
||||||
name='client',
|
name="client",
|
||||||
),
|
),
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name='agent',
|
model_name="agent",
|
||||||
name='site',
|
name="site",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -6,13 +6,13 @@ from django.db import migrations
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('agents', '0023_auto_20201101_2312'),
|
("agents", "0023_auto_20201101_2312"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.RenameField(
|
migrations.RenameField(
|
||||||
model_name='agent',
|
model_name="agent",
|
||||||
old_name='site_link',
|
old_name="site_link",
|
||||||
new_name='site',
|
new_name="site",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -6,13 +6,22 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('agents', '0024_auto_20201101_2319'),
|
("agents", "0024_auto_20201101_2319"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name='recoveryaction',
|
model_name="recoveryaction",
|
||||||
name='mode',
|
name="mode",
|
||||||
field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC')], default='mesh', max_length=50),
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("salt", "Salt"),
|
||||||
|
("mesh", "Mesh"),
|
||||||
|
("command", "Command"),
|
||||||
|
("rpc", "Nats RPC"),
|
||||||
|
],
|
||||||
|
default="mesh",
|
||||||
|
max_length=50,
|
||||||
|
),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -6,13 +6,23 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('agents', '0025_auto_20201122_0407'),
|
("agents", "0025_auto_20201122_0407"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name='recoveryaction',
|
model_name="recoveryaction",
|
||||||
name='mode',
|
name="mode",
|
||||||
field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC'), ('checkrunner', 'Checkrunner')], default='mesh', max_length=50),
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("salt", "Salt"),
|
||||||
|
("mesh", "Mesh"),
|
||||||
|
("command", "Command"),
|
||||||
|
("rpc", "Nats RPC"),
|
||||||
|
("checkrunner", "Checkrunner"),
|
||||||
|
],
|
||||||
|
default="mesh",
|
||||||
|
max_length=50,
|
||||||
|
),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -164,13 +164,11 @@ class Agent(BaseAuditModel):
|
|||||||
elif i.status == "failing":
|
elif i.status == "failing":
|
||||||
failing += 1
|
failing += 1
|
||||||
|
|
||||||
has_failing_checks = True if failing > 0 else False
|
|
||||||
|
|
||||||
ret = {
|
ret = {
|
||||||
"total": total,
|
"total": total,
|
||||||
"passing": passing,
|
"passing": passing,
|
||||||
"failing": failing,
|
"failing": failing,
|
||||||
"has_failing_checks": has_failing_checks,
|
"has_failing_checks": failing > 0,
|
||||||
}
|
}
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|||||||
@@ -4,8 +4,10 @@ from time import sleep
|
|||||||
import random
|
import random
|
||||||
import requests
|
import requests
|
||||||
from packaging import version as pyver
|
from packaging import version as pyver
|
||||||
|
from typing import List
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
from scripts.models import Script
|
||||||
|
|
||||||
from tacticalrmm.celery import app
|
from tacticalrmm.celery import app
|
||||||
from agents.models import Agent, AgentOutage
|
from agents.models import Agent, AgentOutage
|
||||||
@@ -14,153 +16,121 @@ from logs.models import PendingAction
|
|||||||
|
|
||||||
logger.configure(**settings.LOG_CONFIG)
|
logger.configure(**settings.LOG_CONFIG)
|
||||||
|
|
||||||
OLD_64_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2.exe"
|
|
||||||
OLD_32_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2-x86.exe"
|
def _check_agent_service(pk: int) -> None:
|
||||||
|
agent = Agent.objects.get(pk=pk)
|
||||||
|
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=2))
|
||||||
|
if r == "pong":
|
||||||
|
logger.info(
|
||||||
|
f"Detected crashed tacticalagent service on {agent.hostname}, attempting recovery"
|
||||||
|
)
|
||||||
|
data = {"func": "recover", "payload": {"mode": "tacagent"}}
|
||||||
|
asyncio.run(agent.nats_cmd(data, wait=False))
|
||||||
|
|
||||||
|
|
||||||
|
def _check_in_full(pk: int) -> None:
|
||||||
|
agent = Agent.objects.get(pk=pk)
|
||||||
|
asyncio.run(agent.nats_cmd({"func": "checkinfull"}, wait=False))
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def send_agent_update_task(pks, version):
|
def check_in_task() -> None:
|
||||||
assert isinstance(pks, list)
|
q = Agent.objects.only("pk", "version")
|
||||||
|
agents: List[int] = [
|
||||||
q = Agent.objects.filter(pk__in=pks)
|
i.pk for i in q if pyver.parse(i.version) >= pyver.parse("1.1.12")
|
||||||
agents = [i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)]
|
]
|
||||||
|
chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
|
||||||
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
|
|
||||||
|
|
||||||
for chunk in chunks:
|
for chunk in chunks:
|
||||||
for pk in chunk:
|
for pk in chunk:
|
||||||
agent = Agent.objects.get(pk=pk)
|
_check_in_full(pk)
|
||||||
|
sleep(0.1)
|
||||||
|
rand = random.randint(3, 7)
|
||||||
|
sleep(rand)
|
||||||
|
|
||||||
|
|
||||||
|
@app.task
|
||||||
|
def monitor_agents_task() -> None:
|
||||||
|
q = Agent.objects.all()
|
||||||
|
agents: List[int] = [i.pk for i in q if i.has_nats and i.status != "online"]
|
||||||
|
for agent in agents:
|
||||||
|
_check_agent_service(agent)
|
||||||
|
|
||||||
|
|
||||||
|
def agent_update(pk: int) -> str:
|
||||||
|
agent = Agent.objects.get(pk=pk)
|
||||||
# skip if we can't determine the arch
|
# skip if we can't determine the arch
|
||||||
if agent.arch is None:
|
if agent.arch is None:
|
||||||
logger.warning(
|
logger.warning(f"Unable to determine arch on {agent.hostname}. Skipping.")
|
||||||
f"Unable to determine arch on {agent.salt_id}. Skipping."
|
return "noarch"
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# golang agent only backwards compatible with py agent 0.11.2
|
version = settings.LATEST_AGENT_VER
|
||||||
# force an upgrade to the latest python agent if version < 0.11.2
|
|
||||||
if pyver.parse(agent.version) < pyver.parse("0.11.2"):
|
|
||||||
url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT
|
|
||||||
inno = (
|
|
||||||
"winagent-v0.11.2.exe"
|
|
||||||
if agent.arch == "64"
|
|
||||||
else "winagent-v0.11.2-x86.exe"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
url = agent.winagent_dl
|
url = agent.winagent_dl
|
||||||
inno = agent.win_inno_exe
|
inno = agent.win_inno_exe
|
||||||
|
|
||||||
if agent.has_nats:
|
if agent.has_nats:
|
||||||
|
if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
|
||||||
if agent.pendingactions.filter(
|
if agent.pendingactions.filter(
|
||||||
action_type="agentupdate", status="pending"
|
action_type="agentupdate", status="pending"
|
||||||
).exists():
|
).exists():
|
||||||
action = agent.pendingactions.filter(
|
action = agent.pendingactions.filter(
|
||||||
action_type="agentupdate", status="pending"
|
action_type="agentupdate", status="pending"
|
||||||
).last()
|
).last()
|
||||||
if pyver.parse(action.details["version"]) < pyver.parse(
|
if pyver.parse(action.details["version"]) < pyver.parse(version):
|
||||||
settings.LATEST_AGENT_VER
|
|
||||||
):
|
|
||||||
action.delete()
|
action.delete()
|
||||||
else:
|
else:
|
||||||
continue
|
return "pending"
|
||||||
|
|
||||||
PendingAction.objects.create(
|
PendingAction.objects.create(
|
||||||
agent=agent,
|
agent=agent,
|
||||||
action_type="agentupdate",
|
action_type="agentupdate",
|
||||||
details={
|
details={
|
||||||
"url": agent.winagent_dl,
|
|
||||||
"version": settings.LATEST_AGENT_VER,
|
|
||||||
"inno": agent.win_inno_exe,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
# TODO
|
|
||||||
# Salt is deprecated, remove this once salt is gone
|
|
||||||
else:
|
|
||||||
r = agent.salt_api_async(
|
|
||||||
func="win_agent.do_agent_update_v2",
|
|
||||||
kwargs={
|
|
||||||
"inno": inno,
|
|
||||||
"url": url,
|
"url": url,
|
||||||
|
"version": version,
|
||||||
|
"inno": inno,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
sleep(5)
|
else:
|
||||||
|
nats_data = {
|
||||||
|
"func": "agentupdate",
|
||||||
|
"payload": {
|
||||||
|
"url": url,
|
||||||
|
"version": version,
|
||||||
|
"inno": inno,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||||
|
|
||||||
|
return "created"
|
||||||
|
|
||||||
|
return "not supported"
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def auto_self_agent_update_task():
|
def send_agent_update_task(pks: List[int], version: str) -> None:
|
||||||
|
q = Agent.objects.filter(pk__in=pks)
|
||||||
|
agents: List[int] = [
|
||||||
|
i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)
|
||||||
|
]
|
||||||
|
|
||||||
|
for pk in agents:
|
||||||
|
agent_update(pk)
|
||||||
|
|
||||||
|
|
||||||
|
@app.task
|
||||||
|
def auto_self_agent_update_task() -> None:
|
||||||
core = CoreSettings.objects.first()
|
core = CoreSettings.objects.first()
|
||||||
if not core.agent_auto_update:
|
if not core.agent_auto_update:
|
||||||
logger.info("Agent auto update is disabled. Skipping.")
|
|
||||||
return
|
return
|
||||||
|
|
||||||
q = Agent.objects.only("pk", "version")
|
q = Agent.objects.only("pk", "version")
|
||||||
agents = [
|
pks: List[int] = [
|
||||||
i.pk
|
i.pk
|
||||||
for i in q
|
for i in q
|
||||||
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||||
]
|
]
|
||||||
|
|
||||||
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
|
for pk in pks:
|
||||||
|
agent_update(pk)
|
||||||
for chunk in chunks:
|
|
||||||
for pk in chunk:
|
|
||||||
agent = Agent.objects.get(pk=pk)
|
|
||||||
|
|
||||||
# skip if we can't determine the arch
|
|
||||||
if agent.arch is None:
|
|
||||||
logger.warning(
|
|
||||||
f"Unable to determine arch on {agent.salt_id}. Skipping."
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# golang agent only backwards compatible with py agent 0.11.2
|
|
||||||
# force an upgrade to the latest python agent if version < 0.11.2
|
|
||||||
if pyver.parse(agent.version) < pyver.parse("0.11.2"):
|
|
||||||
url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT
|
|
||||||
inno = (
|
|
||||||
"winagent-v0.11.2.exe"
|
|
||||||
if agent.arch == "64"
|
|
||||||
else "winagent-v0.11.2-x86.exe"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
url = agent.winagent_dl
|
|
||||||
inno = agent.win_inno_exe
|
|
||||||
|
|
||||||
if agent.has_nats:
|
|
||||||
if agent.pendingactions.filter(
|
|
||||||
action_type="agentupdate", status="pending"
|
|
||||||
).exists():
|
|
||||||
action = agent.pendingactions.filter(
|
|
||||||
action_type="agentupdate", status="pending"
|
|
||||||
).last()
|
|
||||||
if pyver.parse(action.details["version"]) < pyver.parse(
|
|
||||||
settings.LATEST_AGENT_VER
|
|
||||||
):
|
|
||||||
action.delete()
|
|
||||||
else:
|
|
||||||
continue
|
|
||||||
|
|
||||||
PendingAction.objects.create(
|
|
||||||
agent=agent,
|
|
||||||
action_type="agentupdate",
|
|
||||||
details={
|
|
||||||
"url": agent.winagent_dl,
|
|
||||||
"version": settings.LATEST_AGENT_VER,
|
|
||||||
"inno": agent.win_inno_exe,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
# TODO
|
|
||||||
# Salt is deprecated, remove this once salt is gone
|
|
||||||
else:
|
|
||||||
r = agent.salt_api_async(
|
|
||||||
func="win_agent.do_agent_update_v2",
|
|
||||||
kwargs={
|
|
||||||
"inno": inno,
|
|
||||||
"url": url,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
sleep(5)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
@@ -171,8 +141,14 @@ def sync_sysinfo_task():
|
|||||||
for i in agents
|
for i in agents
|
||||||
if pyver.parse(i.version) >= pyver.parse("1.1.3") and i.status == "online"
|
if pyver.parse(i.version) >= pyver.parse("1.1.3") and i.status == "online"
|
||||||
]
|
]
|
||||||
for agent in online:
|
|
||||||
|
chunks = (online[i : i + 50] for i in range(0, len(online), 50))
|
||||||
|
for chunk in chunks:
|
||||||
|
for agent in chunk:
|
||||||
asyncio.run(agent.nats_cmd({"func": "sync"}, wait=False))
|
asyncio.run(agent.nats_cmd({"func": "sync"}, wait=False))
|
||||||
|
sleep(0.1)
|
||||||
|
rand = random.randint(3, 7)
|
||||||
|
sleep(rand)
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
@@ -315,8 +291,68 @@ def agent_outages_task():
|
|||||||
outage = AgentOutage(agent=agent)
|
outage = AgentOutage(agent=agent)
|
||||||
outage.save()
|
outage.save()
|
||||||
|
|
||||||
|
# add a null check history to allow gaps in graph
|
||||||
|
for check in agent.agentchecks.all():
|
||||||
|
check.add_check_history(None)
|
||||||
|
|
||||||
if agent.overdue_email_alert and not agent.maintenance_mode:
|
if agent.overdue_email_alert and not agent.maintenance_mode:
|
||||||
agent_outage_email_task.delay(pk=outage.pk)
|
agent_outage_email_task.delay(pk=outage.pk)
|
||||||
|
|
||||||
if agent.overdue_text_alert and not agent.maintenance_mode:
|
if agent.overdue_text_alert and not agent.maintenance_mode:
|
||||||
agent_outage_sms_task.delay(pk=outage.pk)
|
agent_outage_sms_task.delay(pk=outage.pk)
|
||||||
|
|
||||||
|
|
||||||
|
@app.task
|
||||||
|
def install_salt_task(pk: int) -> None:
|
||||||
|
sleep(20)
|
||||||
|
agent = Agent.objects.get(pk=pk)
|
||||||
|
asyncio.run(agent.nats_cmd({"func": "installsalt"}, wait=False))
|
||||||
|
|
||||||
|
|
||||||
|
@app.task
|
||||||
|
def run_script_email_results_task(
|
||||||
|
agentpk: int, scriptpk: int, nats_timeout: int, nats_data: dict, emails: List[str]
|
||||||
|
):
|
||||||
|
agent = Agent.objects.get(pk=agentpk)
|
||||||
|
script = Script.objects.get(pk=scriptpk)
|
||||||
|
nats_data["func"] = "runscriptfull"
|
||||||
|
r = asyncio.run(agent.nats_cmd(nats_data, timeout=nats_timeout))
|
||||||
|
if r == "timeout":
|
||||||
|
logger.error(f"{agent.hostname} timed out running script.")
|
||||||
|
return
|
||||||
|
|
||||||
|
CORE = CoreSettings.objects.first()
|
||||||
|
subject = f"{agent.hostname} {script.name} Results"
|
||||||
|
exec_time = "{:.4f}".format(r["execution_time"])
|
||||||
|
body = (
|
||||||
|
subject
|
||||||
|
+ f"\nReturn code: {r['retcode']}\nExecution time: {exec_time} seconds\nStdout: {r['stdout']}\nStderr: {r['stderr']}"
|
||||||
|
)
|
||||||
|
|
||||||
|
import smtplib
|
||||||
|
from email.message import EmailMessage
|
||||||
|
|
||||||
|
msg = EmailMessage()
|
||||||
|
msg["Subject"] = subject
|
||||||
|
msg["From"] = CORE.smtp_from_email
|
||||||
|
|
||||||
|
if emails:
|
||||||
|
msg["To"] = ", ".join(emails)
|
||||||
|
else:
|
||||||
|
msg["To"] = ", ".join(CORE.email_alert_recipients)
|
||||||
|
|
||||||
|
msg.set_content(body)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
|
||||||
|
if CORE.smtp_requires_auth:
|
||||||
|
server.ehlo()
|
||||||
|
server.starttls()
|
||||||
|
server.login(CORE.smtp_host_user, CORE.smtp_host_password)
|
||||||
|
server.send_message(msg)
|
||||||
|
server.quit()
|
||||||
|
else:
|
||||||
|
server.send_message(msg)
|
||||||
|
server.quit()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(e)
|
||||||
|
|||||||
@@ -5,19 +5,20 @@ from unittest.mock import patch
|
|||||||
from model_bakery import baker
|
from model_bakery import baker
|
||||||
from itertools import cycle
|
from itertools import cycle
|
||||||
|
|
||||||
|
from django.test import TestCase, override_settings
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.utils import timezone as djangotime
|
from django.utils import timezone as djangotime
|
||||||
|
from logs.models import PendingAction
|
||||||
|
|
||||||
from tacticalrmm.test import TacticalTestCase
|
from tacticalrmm.test import TacticalTestCase
|
||||||
from .serializers import AgentSerializer
|
from .serializers import AgentSerializer
|
||||||
from winupdate.serializers import WinUpdatePolicySerializer
|
from winupdate.serializers import WinUpdatePolicySerializer
|
||||||
from .models import Agent
|
from .models import Agent
|
||||||
from .tasks import (
|
from .tasks import (
|
||||||
|
agent_recovery_sms_task,
|
||||||
auto_self_agent_update_task,
|
auto_self_agent_update_task,
|
||||||
sync_salt_modules_task,
|
sync_salt_modules_task,
|
||||||
batch_sync_modules_task,
|
batch_sync_modules_task,
|
||||||
OLD_64_PY_AGENT,
|
|
||||||
OLD_32_PY_AGENT,
|
|
||||||
)
|
)
|
||||||
from winupdate.models import WinUpdatePolicy
|
from winupdate.models import WinUpdatePolicy
|
||||||
|
|
||||||
@@ -549,6 +550,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
payload = {
|
payload = {
|
||||||
"mode": "command",
|
"mode": "command",
|
||||||
|
"monType": "all",
|
||||||
"target": "agents",
|
"target": "agents",
|
||||||
"client": None,
|
"client": None,
|
||||||
"site": None,
|
"site": None,
|
||||||
@@ -566,6 +568,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
payload = {
|
payload = {
|
||||||
"mode": "command",
|
"mode": "command",
|
||||||
|
"monType": "servers",
|
||||||
"target": "agents",
|
"target": "agents",
|
||||||
"client": None,
|
"client": None,
|
||||||
"site": None,
|
"site": None,
|
||||||
@@ -578,14 +581,13 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
r = self.client.post(url, payload, format="json")
|
r = self.client.post(url, payload, format="json")
|
||||||
self.assertEqual(r.status_code, 400)
|
self.assertEqual(r.status_code, 400)
|
||||||
|
|
||||||
payload = {
|
""" payload = {
|
||||||
"mode": "command",
|
"mode": "command",
|
||||||
|
"monType": "workstations",
|
||||||
"target": "client",
|
"target": "client",
|
||||||
"client": self.agent.client.id,
|
"client": self.agent.client.id,
|
||||||
"site": None,
|
"site": None,
|
||||||
"agentPKs": [
|
"agentPKs": [],
|
||||||
self.agent.pk,
|
|
||||||
],
|
|
||||||
"cmd": "gpupdate /force",
|
"cmd": "gpupdate /force",
|
||||||
"timeout": 300,
|
"timeout": 300,
|
||||||
"shell": "cmd",
|
"shell": "cmd",
|
||||||
@@ -593,10 +595,11 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
r = self.client.post(url, payload, format="json")
|
r = self.client.post(url, payload, format="json")
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
|
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300) """
|
||||||
|
|
||||||
payload = {
|
payload = {
|
||||||
"mode": "command",
|
"mode": "command",
|
||||||
|
"monType": "all",
|
||||||
"target": "client",
|
"target": "client",
|
||||||
"client": self.agent.client.id,
|
"client": self.agent.client.id,
|
||||||
"site": self.agent.site.id,
|
"site": self.agent.site.id,
|
||||||
@@ -614,6 +617,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
payload = {
|
payload = {
|
||||||
"mode": "scan",
|
"mode": "scan",
|
||||||
|
"monType": "all",
|
||||||
"target": "agents",
|
"target": "agents",
|
||||||
"client": None,
|
"client": None,
|
||||||
"site": None,
|
"site": None,
|
||||||
@@ -627,6 +631,7 @@ class TestAgentViews(TacticalTestCase):
|
|||||||
|
|
||||||
payload = {
|
payload = {
|
||||||
"mode": "install",
|
"mode": "install",
|
||||||
|
"monType": "all",
|
||||||
"target": "client",
|
"target": "client",
|
||||||
"client": self.agent.client.id,
|
"client": self.agent.client.id,
|
||||||
"site": None,
|
"site": None,
|
||||||
@@ -785,7 +790,62 @@ class TestAgentTasks(TacticalTestCase):
|
|||||||
self.assertEqual(salt_batch_async.call_count, 4)
|
self.assertEqual(salt_batch_async.call_count, 4)
|
||||||
self.assertEqual(ret.status, "SUCCESS")
|
self.assertEqual(ret.status, "SUCCESS")
|
||||||
|
|
||||||
@patch("agents.models.Agent.salt_api_async")
|
@patch("agents.models.Agent.nats_cmd")
|
||||||
|
def test_agent_update(self, nats_cmd):
|
||||||
|
from agents.tasks import agent_update
|
||||||
|
|
||||||
|
agent_noarch = baker.make_recipe(
|
||||||
|
"agents.agent",
|
||||||
|
operating_system="Error getting OS",
|
||||||
|
version="1.1.11",
|
||||||
|
)
|
||||||
|
r = agent_update(agent_noarch.pk)
|
||||||
|
self.assertEqual(r, "noarch")
|
||||||
|
self.assertEqual(
|
||||||
|
PendingAction.objects.filter(
|
||||||
|
agent=agent_noarch, action_type="agentupdate"
|
||||||
|
).count(),
|
||||||
|
0,
|
||||||
|
)
|
||||||
|
|
||||||
|
agent64_111 = baker.make_recipe(
|
||||||
|
"agents.agent",
|
||||||
|
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||||
|
version="1.1.11",
|
||||||
|
)
|
||||||
|
|
||||||
|
r = agent_update(agent64_111.pk)
|
||||||
|
self.assertEqual(r, "created")
|
||||||
|
action = PendingAction.objects.get(agent__pk=agent64_111.pk)
|
||||||
|
self.assertEqual(action.action_type, "agentupdate")
|
||||||
|
self.assertEqual(action.status, "pending")
|
||||||
|
self.assertEqual(action.details["url"], settings.DL_64)
|
||||||
|
self.assertEqual(
|
||||||
|
action.details["inno"], f"winagent-v{settings.LATEST_AGENT_VER}.exe"
|
||||||
|
)
|
||||||
|
self.assertEqual(action.details["version"], settings.LATEST_AGENT_VER)
|
||||||
|
|
||||||
|
agent64 = baker.make_recipe(
|
||||||
|
"agents.agent",
|
||||||
|
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||||
|
version="1.1.12",
|
||||||
|
)
|
||||||
|
nats_cmd.return_value = "ok"
|
||||||
|
r = agent_update(agent64.pk)
|
||||||
|
self.assertEqual(r, "created")
|
||||||
|
nats_cmd.assert_called_with(
|
||||||
|
{
|
||||||
|
"func": "agentupdate",
|
||||||
|
"payload": {
|
||||||
|
"url": settings.DL_64,
|
||||||
|
"version": settings.LATEST_AGENT_VER,
|
||||||
|
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
wait=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
""" @patch("agents.models.Agent.salt_api_async")
|
||||||
@patch("agents.tasks.sleep", return_value=None)
|
@patch("agents.tasks.sleep", return_value=None)
|
||||||
def test_auto_self_agent_update_task(self, mock_sleep, salt_api_async):
|
def test_auto_self_agent_update_task(self, mock_sleep, salt_api_async):
|
||||||
# test 64bit golang agent
|
# test 64bit golang agent
|
||||||
@@ -888,4 +948,4 @@ class TestAgentTasks(TacticalTestCase):
|
|||||||
"url": OLD_32_PY_AGENT,
|
"url": OLD_32_PY_AGENT,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.assertEqual(ret.status, "SUCCESS")
|
self.assertEqual(ret.status, "SUCCESS") """
|
||||||
|
|||||||
@@ -32,7 +32,11 @@ from .serializers import (
|
|||||||
)
|
)
|
||||||
from winupdate.serializers import WinUpdatePolicySerializer
|
from winupdate.serializers import WinUpdatePolicySerializer
|
||||||
|
|
||||||
from .tasks import uninstall_agent_task, send_agent_update_task
|
from .tasks import (
|
||||||
|
uninstall_agent_task,
|
||||||
|
send_agent_update_task,
|
||||||
|
run_script_email_results_task,
|
||||||
|
)
|
||||||
from winupdate.tasks import bulk_check_for_updates_task
|
from winupdate.tasks import bulk_check_for_updates_task
|
||||||
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
|
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
|
||||||
|
|
||||||
@@ -738,6 +742,21 @@ def run_script(request):
|
|||||||
if output == "wait":
|
if output == "wait":
|
||||||
r = asyncio.run(agent.nats_cmd(data, timeout=req_timeout))
|
r = asyncio.run(agent.nats_cmd(data, timeout=req_timeout))
|
||||||
return Response(r)
|
return Response(r)
|
||||||
|
elif output == "email":
|
||||||
|
if not pyver.parse(agent.version) >= pyver.parse("1.1.12"):
|
||||||
|
return notify_error("Requires agent version 1.1.12 or greater")
|
||||||
|
|
||||||
|
emails = (
|
||||||
|
[] if request.data["emailmode"] == "default" else request.data["emails"]
|
||||||
|
)
|
||||||
|
run_script_email_results_task.delay(
|
||||||
|
agentpk=agent.pk,
|
||||||
|
scriptpk=script.pk,
|
||||||
|
nats_timeout=req_timeout,
|
||||||
|
nats_data=data,
|
||||||
|
emails=emails,
|
||||||
|
)
|
||||||
|
return Response(f"{script.name} will now be run on {agent.hostname}")
|
||||||
else:
|
else:
|
||||||
asyncio.run(agent.nats_cmd(data, wait=False))
|
asyncio.run(agent.nats_cmd(data, wait=False))
|
||||||
return Response(f"{script.name} will now be run on {agent.hostname}")
|
return Response(f"{script.name} will now be run on {agent.hostname}")
|
||||||
@@ -825,6 +844,11 @@ def bulk(request):
|
|||||||
else:
|
else:
|
||||||
return notify_error("Something went wrong")
|
return notify_error("Something went wrong")
|
||||||
|
|
||||||
|
if request.data["monType"] == "servers":
|
||||||
|
q = q.filter(monitoring_type="server")
|
||||||
|
elif request.data["monType"] == "workstations":
|
||||||
|
q = q.filter(monitoring_type="workstation")
|
||||||
|
|
||||||
minions = [agent.salt_id for agent in q]
|
minions = [agent.salt_id for agent in q]
|
||||||
agents = [agent.pk for agent in q]
|
agents = [agent.pk for agent in q]
|
||||||
|
|
||||||
|
|||||||
@@ -7,19 +7,25 @@ import django.db.models.deletion
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('checks', '0010_auto_20200922_1344'),
|
("checks", "0010_auto_20200922_1344"),
|
||||||
('alerts', '0002_auto_20200815_1618'),
|
("alerts", "0002_auto_20200815_1618"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='alert',
|
model_name="alert",
|
||||||
name='assigned_check',
|
name="assigned_check",
|
||||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='checks.check'),
|
field=models.ForeignKey(
|
||||||
|
blank=True,
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="alert",
|
||||||
|
to="checks.check",
|
||||||
|
),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name='alert',
|
model_name="alert",
|
||||||
name='alert_time',
|
name="alert_time",
|
||||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -2,4 +2,4 @@ from django.apps import AppConfig
|
|||||||
|
|
||||||
|
|
||||||
class Apiv2Config(AppConfig):
|
class Apiv2Config(AppConfig):
|
||||||
name = 'apiv2'
|
name = "apiv2"
|
||||||
|
|||||||
@@ -61,7 +61,7 @@ class TestAPIv3(TacticalTestCase):
|
|||||||
def test_sysinfo(self):
|
def test_sysinfo(self):
|
||||||
# TODO replace this with golang wmi sample data
|
# TODO replace this with golang wmi sample data
|
||||||
|
|
||||||
url = f"/api/v3/sysinfo/"
|
url = "/api/v3/sysinfo/"
|
||||||
with open(
|
with open(
|
||||||
os.path.join(
|
os.path.join(
|
||||||
settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json"
|
settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json"
|
||||||
@@ -77,7 +77,7 @@ class TestAPIv3(TacticalTestCase):
|
|||||||
self.check_not_authenticated("patch", url)
|
self.check_not_authenticated("patch", url)
|
||||||
|
|
||||||
def test_hello_patch(self):
|
def test_hello_patch(self):
|
||||||
url = f"/api/v3/hello/"
|
url = "/api/v3/hello/"
|
||||||
payload = {
|
payload = {
|
||||||
"agent_id": self.agent.agent_id,
|
"agent_id": self.agent.agent_id,
|
||||||
"logged_in_username": "None",
|
"logged_in_username": "None",
|
||||||
@@ -92,3 +92,12 @@ class TestAPIv3(TacticalTestCase):
|
|||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
self.check_not_authenticated("patch", url)
|
self.check_not_authenticated("patch", url)
|
||||||
|
|
||||||
|
@patch("agents.tasks.install_salt_task.delay")
|
||||||
|
def test_install_salt(self, mock_task):
|
||||||
|
url = f"/api/v3/{self.agent.agent_id}/installsalt/"
|
||||||
|
r = self.client.get(url, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
mock_task.assert_called_with(self.agent.pk)
|
||||||
|
|
||||||
|
self.check_not_authenticated("get", url)
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ from django.urls import path
|
|||||||
from . import views
|
from . import views
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
|
path("checkin/", views.CheckIn.as_view()),
|
||||||
path("hello/", views.Hello.as_view()),
|
path("hello/", views.Hello.as_view()),
|
||||||
path("checkrunner/", views.CheckRunner.as_view()),
|
path("checkrunner/", views.CheckRunner.as_view()),
|
||||||
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
|
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
|
||||||
@@ -16,4 +17,5 @@ urlpatterns = [
|
|||||||
path("<str:agentid>/winupdater/", views.WinUpdater.as_view()),
|
path("<str:agentid>/winupdater/", views.WinUpdater.as_view()),
|
||||||
path("software/", views.Software.as_view()),
|
path("software/", views.Software.as_view()),
|
||||||
path("installer/", views.Installer.as_view()),
|
path("installer/", views.Installer.as_view()),
|
||||||
|
path("<str:agentid>/installsalt/", views.InstallSalt.as_view()),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ from agents.tasks import (
|
|||||||
agent_recovery_email_task,
|
agent_recovery_email_task,
|
||||||
agent_recovery_sms_task,
|
agent_recovery_sms_task,
|
||||||
sync_salt_modules_task,
|
sync_salt_modules_task,
|
||||||
|
install_salt_task,
|
||||||
)
|
)
|
||||||
from winupdate.tasks import check_for_updates_task
|
from winupdate.tasks import check_for_updates_task
|
||||||
from software.tasks import install_chocolatey
|
from software.tasks import install_chocolatey
|
||||||
@@ -39,7 +40,112 @@ from tacticalrmm.utils import notify_error, reload_nats, filter_software, Softwa
|
|||||||
logger.configure(**settings.LOG_CONFIG)
|
logger.configure(**settings.LOG_CONFIG)
|
||||||
|
|
||||||
|
|
||||||
|
class CheckIn(APIView):
|
||||||
|
"""
|
||||||
|
The agent's checkin endpoint
|
||||||
|
patch: called every 45 to 110 seconds, handles agent updates and recovery
|
||||||
|
put: called every 5 to 10 minutes, handles basic system info
|
||||||
|
post: called once on windows service startup
|
||||||
|
"""
|
||||||
|
|
||||||
|
authentication_classes = [TokenAuthentication]
|
||||||
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
|
def patch(self, request):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
agent.version = request.data["version"]
|
||||||
|
agent.last_seen = djangotime.now()
|
||||||
|
agent.save(update_fields=["version", "last_seen"])
|
||||||
|
|
||||||
|
if agent.agentoutages.exists() and agent.agentoutages.last().is_active:
|
||||||
|
last_outage = agent.agentoutages.last()
|
||||||
|
last_outage.recovery_time = djangotime.now()
|
||||||
|
last_outage.save(update_fields=["recovery_time"])
|
||||||
|
|
||||||
|
if agent.overdue_email_alert:
|
||||||
|
agent_recovery_email_task.delay(pk=last_outage.pk)
|
||||||
|
if agent.overdue_text_alert:
|
||||||
|
agent_recovery_sms_task.delay(pk=last_outage.pk)
|
||||||
|
|
||||||
|
recovery = agent.recoveryactions.filter(last_run=None).last()
|
||||||
|
if recovery is not None:
|
||||||
|
recovery.last_run = djangotime.now()
|
||||||
|
recovery.save(update_fields=["last_run"])
|
||||||
|
return Response(recovery.send())
|
||||||
|
|
||||||
|
# handle agent update
|
||||||
|
if agent.pendingactions.filter(
|
||||||
|
action_type="agentupdate", status="pending"
|
||||||
|
).exists():
|
||||||
|
update = agent.pendingactions.filter(
|
||||||
|
action_type="agentupdate", status="pending"
|
||||||
|
).last()
|
||||||
|
update.status = "completed"
|
||||||
|
update.save(update_fields=["status"])
|
||||||
|
return Response(update.details)
|
||||||
|
|
||||||
|
# get any pending actions
|
||||||
|
if agent.pendingactions.filter(status="pending").exists():
|
||||||
|
agent.handle_pending_actions()
|
||||||
|
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
def put(self, request):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
|
||||||
|
if "disks" in request.data.keys():
|
||||||
|
disks = request.data["disks"]
|
||||||
|
new = []
|
||||||
|
# python agent
|
||||||
|
if isinstance(disks, dict):
|
||||||
|
for k, v in disks.items():
|
||||||
|
new.append(v)
|
||||||
|
else:
|
||||||
|
# golang agent
|
||||||
|
for disk in disks:
|
||||||
|
tmp = {}
|
||||||
|
for k, v in disk.items():
|
||||||
|
tmp["device"] = disk["device"]
|
||||||
|
tmp["fstype"] = disk["fstype"]
|
||||||
|
tmp["total"] = bytes2human(disk["total"])
|
||||||
|
tmp["used"] = bytes2human(disk["used"])
|
||||||
|
tmp["free"] = bytes2human(disk["free"])
|
||||||
|
tmp["percent"] = int(disk["percent"])
|
||||||
|
new.append(tmp)
|
||||||
|
|
||||||
|
serializer.save(disks=new)
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
if "logged_in_username" in request.data.keys():
|
||||||
|
if request.data["logged_in_username"] != "None":
|
||||||
|
serializer.save(last_logged_in_user=request.data["logged_in_username"])
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
serializer.save()
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
|
||||||
|
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save(last_seen=djangotime.now())
|
||||||
|
|
||||||
|
sync_salt_modules_task.delay(agent.pk)
|
||||||
|
check_for_updates_task.apply_async(
|
||||||
|
queue="wupdate", kwargs={"pk": agent.pk, "wait": True}
|
||||||
|
)
|
||||||
|
|
||||||
|
if not agent.choco_installed:
|
||||||
|
install_chocolatey.delay(agent.pk, wait=True)
|
||||||
|
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
class Hello(APIView):
|
class Hello(APIView):
|
||||||
|
#### DEPRECATED, for agents <= 1.1.9 ####
|
||||||
"""
|
"""
|
||||||
The agent's checkin endpoint
|
The agent's checkin endpoint
|
||||||
patch: called every 30 to 120 seconds
|
patch: called every 30 to 120 seconds
|
||||||
@@ -160,16 +266,6 @@ class CheckRunner(APIView):
|
|||||||
check.save(update_fields=["last_run"])
|
check.save(update_fields=["last_run"])
|
||||||
status = check.handle_checkv2(request.data)
|
status = check.handle_checkv2(request.data)
|
||||||
|
|
||||||
# create audit entry
|
|
||||||
AuditLog.objects.create(
|
|
||||||
username=check.agent.hostname,
|
|
||||||
agent=check.agent.hostname,
|
|
||||||
object_type="agent",
|
|
||||||
action="check_run",
|
|
||||||
message=f"{check.readable_desc} was run on {check.agent.hostname}. Status: {status}",
|
|
||||||
after_value=Check.serialize(check),
|
|
||||||
)
|
|
||||||
|
|
||||||
return Response(status)
|
return Response(status)
|
||||||
|
|
||||||
|
|
||||||
@@ -521,3 +617,13 @@ class Installer(APIView):
|
|||||||
)
|
)
|
||||||
|
|
||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
|
class InstallSalt(APIView):
|
||||||
|
authentication_classes = [TokenAuthentication]
|
||||||
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
|
def get(self, request, agentid):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||||
|
install_salt_task.delay(agent.pk)
|
||||||
|
return Response("ok")
|
||||||
|
|||||||
@@ -6,11 +6,11 @@ from django.db import migrations
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('automation', '0005_auto_20200922_1344'),
|
("automation", "0005_auto_20200922_1344"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.DeleteModel(
|
migrations.DeleteModel(
|
||||||
name='PolicyExclusions',
|
name="PolicyExclusions",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -80,7 +80,7 @@ class Policy(BaseAuditModel):
|
|||||||
default_policy = CoreSettings.objects.first().server_policy
|
default_policy = CoreSettings.objects.first().server_policy
|
||||||
client_policy = client.server_policy
|
client_policy = client.server_policy
|
||||||
site_policy = site.server_policy
|
site_policy = site.server_policy
|
||||||
else:
|
elif agent.monitoring_type == "workstation":
|
||||||
default_policy = CoreSettings.objects.first().workstation_policy
|
default_policy = CoreSettings.objects.first().workstation_policy
|
||||||
client_policy = client.workstation_policy
|
client_policy = client.workstation_policy
|
||||||
site_policy = site.workstation_policy
|
site_policy = site.workstation_policy
|
||||||
@@ -132,7 +132,7 @@ class Policy(BaseAuditModel):
|
|||||||
default_policy = CoreSettings.objects.first().server_policy
|
default_policy = CoreSettings.objects.first().server_policy
|
||||||
client_policy = client.server_policy
|
client_policy = client.server_policy
|
||||||
site_policy = site.server_policy
|
site_policy = site.server_policy
|
||||||
else:
|
elif agent.monitoring_type == "workstation":
|
||||||
default_policy = CoreSettings.objects.first().workstation_policy
|
default_policy = CoreSettings.objects.first().workstation_policy
|
||||||
client_policy = client.workstation_policy
|
client_policy = client.workstation_policy
|
||||||
site_policy = site.workstation_policy
|
site_policy = site.workstation_policy
|
||||||
|
|||||||
@@ -19,7 +19,17 @@ def generate_agent_checks_from_policies_task(
|
|||||||
):
|
):
|
||||||
|
|
||||||
policy = Policy.objects.get(pk=policypk)
|
policy = Policy.objects.get(pk=policypk)
|
||||||
for agent in policy.related_agents():
|
|
||||||
|
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
||||||
|
agents = Agent.objects.all()
|
||||||
|
elif policy.is_default_server_policy:
|
||||||
|
agents = Agent.objects.filter(monitoring_type="server")
|
||||||
|
elif policy.is_default_workstation_policy:
|
||||||
|
agents = Agent.objects.filter(monitoring_type="workstation")
|
||||||
|
else:
|
||||||
|
agents = policy.related_agents()
|
||||||
|
|
||||||
|
for agent in agents:
|
||||||
agent.generate_checks_from_policies(clear=clear)
|
agent.generate_checks_from_policies(clear=clear)
|
||||||
if create_tasks:
|
if create_tasks:
|
||||||
agent.generate_tasks_from_policies(
|
agent.generate_tasks_from_policies(
|
||||||
@@ -86,7 +96,17 @@ def update_policy_check_fields_task(checkpk):
|
|||||||
def generate_agent_tasks_from_policies_task(policypk, clear=False):
|
def generate_agent_tasks_from_policies_task(policypk, clear=False):
|
||||||
|
|
||||||
policy = Policy.objects.get(pk=policypk)
|
policy = Policy.objects.get(pk=policypk)
|
||||||
for agent in policy.related_agents():
|
|
||||||
|
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
||||||
|
agents = Agent.objects.all()
|
||||||
|
elif policy.is_default_server_policy:
|
||||||
|
agents = Agent.objects.filter(monitoring_type="server")
|
||||||
|
elif policy.is_default_workstation_policy:
|
||||||
|
agents = Agent.objects.filter(monitoring_type="workstation")
|
||||||
|
else:
|
||||||
|
agents = policy.related_agents()
|
||||||
|
|
||||||
|
for agent in agents:
|
||||||
agent.generate_tasks_from_policies(clear=clear)
|
agent.generate_tasks_from_policies(clear=clear)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -6,13 +6,13 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('autotasks', '0008_auto_20201030_1515'),
|
("autotasks", "0008_auto_20201030_1515"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='automatedtask',
|
model_name="automatedtask",
|
||||||
name='run_time_bit_weekdays',
|
name="run_time_bit_weekdays",
|
||||||
field=models.IntegerField(blank=True, null=True),
|
field=models.IntegerField(blank=True, null=True),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -176,6 +176,12 @@ def delete_win_task_schedule(pk, pending_action=False):
|
|||||||
pendingaction.status = "completed"
|
pendingaction.status = "completed"
|
||||||
pendingaction.save(update_fields=["status"])
|
pendingaction.save(update_fields=["status"])
|
||||||
|
|
||||||
|
# complete any other pending actions on agent with same task_id
|
||||||
|
for action in task.agent.pendingactions.all():
|
||||||
|
if action.details["task_id"] == task.id:
|
||||||
|
action.status = "completed"
|
||||||
|
action.save()
|
||||||
|
|
||||||
task.delete()
|
task.delete()
|
||||||
return "ok"
|
return "ok"
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
from django.contrib import admin
|
from django.contrib import admin
|
||||||
|
|
||||||
from .models import Check
|
from .models import Check, CheckHistory
|
||||||
|
|
||||||
admin.site.register(Check)
|
admin.site.register(Check)
|
||||||
|
admin.site.register(CheckHistory)
|
||||||
|
|||||||
30
api/tacticalrmm/checks/migrations/0011_check_run_history.py
Normal file
30
api/tacticalrmm/checks/migrations/0011_check_run_history.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-09 02:56
|
||||||
|
|
||||||
|
import django.contrib.postgres.fields
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("checks", "0010_auto_20200922_1344"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="check",
|
||||||
|
name="run_history",
|
||||||
|
field=django.contrib.postgres.fields.ArrayField(
|
||||||
|
base_field=django.contrib.postgres.fields.ArrayField(
|
||||||
|
base_field=models.PositiveIntegerField(),
|
||||||
|
blank=True,
|
||||||
|
null=True,
|
||||||
|
size=None,
|
||||||
|
),
|
||||||
|
blank=True,
|
||||||
|
default=list,
|
||||||
|
null=True,
|
||||||
|
size=None,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
||||||
39
api/tacticalrmm/checks/migrations/0011_checkhistory.py
Normal file
39
api/tacticalrmm/checks/migrations/0011_checkhistory.py
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-09 21:36
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("checks", "0010_auto_20200922_1344"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="CheckHistory",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"id",
|
||||||
|
models.AutoField(
|
||||||
|
auto_created=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
verbose_name="ID",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("x", models.DateTimeField()),
|
||||||
|
("y", models.PositiveIntegerField()),
|
||||||
|
("results", models.JSONField(blank=True, null=True)),
|
||||||
|
(
|
||||||
|
"check_history",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="check_history",
|
||||||
|
to="checks.check",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
]
|
||||||
18
api/tacticalrmm/checks/migrations/0012_auto_20210110_0503.py
Normal file
18
api/tacticalrmm/checks/migrations/0012_auto_20210110_0503.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-10 05:03
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("checks", "0011_checkhistory"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="checkhistory",
|
||||||
|
name="y",
|
||||||
|
field=models.PositiveIntegerField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
18
api/tacticalrmm/checks/migrations/0013_auto_20210110_0505.py
Normal file
18
api/tacticalrmm/checks/migrations/0013_auto_20210110_0505.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-10 05:05
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("checks", "0012_auto_20210110_0503"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="checkhistory",
|
||||||
|
name="y",
|
||||||
|
field=models.PositiveIntegerField(null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-10 18:08
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("checks", "0013_auto_20210110_0505"),
|
||||||
|
("checks", "0011_check_run_history"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = []
|
||||||
27
api/tacticalrmm/checks/migrations/0015_auto_20210110_1808.py
Normal file
27
api/tacticalrmm/checks/migrations/0015_auto_20210110_1808.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-10 18:08
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("checks", "0014_merge_20210110_1808"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name="check",
|
||||||
|
name="run_history",
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="checkhistory",
|
||||||
|
name="x",
|
||||||
|
field=models.DateTimeField(auto_now_add=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="checkhistory",
|
||||||
|
name="y",
|
||||||
|
field=models.PositiveIntegerField(blank=True, default=None, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -3,12 +3,13 @@ import string
|
|||||||
import os
|
import os
|
||||||
import json
|
import json
|
||||||
import pytz
|
import pytz
|
||||||
from statistics import mean
|
from statistics import mean, mode
|
||||||
|
|
||||||
from django.db import models
|
from django.db import models
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.contrib.postgres.fields import ArrayField
|
from django.contrib.postgres.fields import ArrayField
|
||||||
from django.core.validators import MinValueValidator, MaxValueValidator
|
from django.core.validators import MinValueValidator, MaxValueValidator
|
||||||
|
from rest_framework.fields import JSONField
|
||||||
|
|
||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
from logs.models import BaseAuditModel
|
from logs.models import BaseAuditModel
|
||||||
@@ -214,6 +215,10 @@ class Check(BaseAuditModel):
|
|||||||
"modified_time",
|
"modified_time",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
def add_check_history(self, value):
|
||||||
|
if self.check_type in ["memory", "cpuload", "diskspace"]:
|
||||||
|
CheckHistory.objects.create(check_history=self, y=value)
|
||||||
|
|
||||||
def handle_checkv2(self, data):
|
def handle_checkv2(self, data):
|
||||||
# cpuload or mem checks
|
# cpuload or mem checks
|
||||||
if self.check_type == "cpuload" or self.check_type == "memory":
|
if self.check_type == "cpuload" or self.check_type == "memory":
|
||||||
@@ -232,6 +237,9 @@ class Check(BaseAuditModel):
|
|||||||
else:
|
else:
|
||||||
self.status = "passing"
|
self.status = "passing"
|
||||||
|
|
||||||
|
# add check history
|
||||||
|
self.add_check_history(data["percent"])
|
||||||
|
|
||||||
# diskspace checks
|
# diskspace checks
|
||||||
elif self.check_type == "diskspace":
|
elif self.check_type == "diskspace":
|
||||||
if data["exists"]:
|
if data["exists"]:
|
||||||
@@ -245,6 +253,9 @@ class Check(BaseAuditModel):
|
|||||||
self.status = "passing"
|
self.status = "passing"
|
||||||
|
|
||||||
self.more_info = f"Total: {total}B, Free: {free}B"
|
self.more_info = f"Total: {total}B, Free: {free}B"
|
||||||
|
|
||||||
|
# add check history
|
||||||
|
self.add_check_history(percent_used)
|
||||||
else:
|
else:
|
||||||
self.status = "failing"
|
self.status = "failing"
|
||||||
self.more_info = f"Disk {self.disk} does not exist"
|
self.more_info = f"Disk {self.disk} does not exist"
|
||||||
@@ -645,3 +656,17 @@ class Check(BaseAuditModel):
|
|||||||
body = subject
|
body = subject
|
||||||
|
|
||||||
CORE.send_sms(body)
|
CORE.send_sms(body)
|
||||||
|
|
||||||
|
|
||||||
|
class CheckHistory(models.Model):
|
||||||
|
check_history = models.ForeignKey(
|
||||||
|
Check,
|
||||||
|
related_name="check_history",
|
||||||
|
on_delete=models.CASCADE,
|
||||||
|
)
|
||||||
|
x = models.DateTimeField(auto_now_add=True)
|
||||||
|
y = models.PositiveIntegerField(null=True, blank=True, default=None)
|
||||||
|
results = models.JSONField(null=True, blank=True)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.check_history.readable_desc
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
import validators as _v
|
import validators as _v
|
||||||
|
import pytz
|
||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
from .models import Check
|
from .models import Check, CheckHistory
|
||||||
from autotasks.models import AutomatedTask
|
from autotasks.models import AutomatedTask
|
||||||
from scripts.serializers import ScriptSerializer, ScriptCheckSerializer
|
from scripts.serializers import ScriptSerializer, ScriptCheckSerializer
|
||||||
|
|
||||||
@@ -65,6 +65,26 @@ class CheckSerializer(serializers.ModelSerializer):
|
|||||||
"Please enter a valid IP address or domain name"
|
"Please enter a valid IP address or domain name"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if check_type == "cpuload" and not self.instance:
|
||||||
|
if (
|
||||||
|
Check.objects.filter(**self.context, check_type="cpuload")
|
||||||
|
.exclude(managed_by_policy=True)
|
||||||
|
.exists()
|
||||||
|
):
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
"A cpuload check for this agent already exists"
|
||||||
|
)
|
||||||
|
|
||||||
|
if check_type == "memory" and not self.instance:
|
||||||
|
if (
|
||||||
|
Check.objects.filter(**self.context, check_type="memory")
|
||||||
|
.exclude(managed_by_policy=True)
|
||||||
|
.exists()
|
||||||
|
):
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
"A memory check for this agent already exists"
|
||||||
|
)
|
||||||
|
|
||||||
return val
|
return val
|
||||||
|
|
||||||
|
|
||||||
@@ -217,3 +237,15 @@ class CheckResultsSerializer(serializers.ModelSerializer):
|
|||||||
class Meta:
|
class Meta:
|
||||||
model = Check
|
model = Check
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class CheckHistorySerializer(serializers.ModelSerializer):
|
||||||
|
x = serializers.SerializerMethodField()
|
||||||
|
|
||||||
|
def get_x(self, obj):
|
||||||
|
return obj.x.astimezone(pytz.timezone(self.context["timezone"])).isoformat()
|
||||||
|
|
||||||
|
# used for return large amounts of graph data
|
||||||
|
class Meta:
|
||||||
|
model = CheckHistory
|
||||||
|
fields = ("x", "y")
|
||||||
|
|||||||
@@ -5,8 +5,6 @@ from time import sleep
|
|||||||
from tacticalrmm.celery import app
|
from tacticalrmm.celery import app
|
||||||
from django.utils import timezone as djangotime
|
from django.utils import timezone as djangotime
|
||||||
|
|
||||||
from agents.models import Agent
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def handle_check_email_alert_task(pk):
|
def handle_check_email_alert_task(pk):
|
||||||
@@ -56,3 +54,15 @@ def handle_check_sms_alert_task(pk):
|
|||||||
check.save(update_fields=["text_sent"])
|
check.save(update_fields=["text_sent"])
|
||||||
|
|
||||||
return "ok"
|
return "ok"
|
||||||
|
|
||||||
|
|
||||||
|
@app.task
|
||||||
|
def prune_check_history(older_than_days: int) -> str:
|
||||||
|
from .models import CheckHistory
|
||||||
|
|
||||||
|
CheckHistory.objects.filter(
|
||||||
|
x__lt=djangotime.make_aware(dt.datetime.today())
|
||||||
|
- djangotime.timedelta(days=older_than_days)
|
||||||
|
).delete()
|
||||||
|
|
||||||
|
return "ok"
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
|
from checks.models import CheckHistory
|
||||||
from tacticalrmm.test import TacticalTestCase
|
from tacticalrmm.test import TacticalTestCase
|
||||||
from .serializers import CheckSerializer
|
from .serializers import CheckSerializer
|
||||||
|
from django.utils import timezone as djangotime
|
||||||
|
|
||||||
from model_bakery import baker
|
from model_bakery import baker
|
||||||
from itertools import cycle
|
from itertools import cycle
|
||||||
@@ -8,6 +10,7 @@ from itertools import cycle
|
|||||||
class TestCheckViews(TacticalTestCase):
|
class TestCheckViews(TacticalTestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.authenticate()
|
self.authenticate()
|
||||||
|
self.setup_coresettings()
|
||||||
|
|
||||||
def test_get_disk_check(self):
|
def test_get_disk_check(self):
|
||||||
# setup data
|
# setup data
|
||||||
@@ -55,6 +58,52 @@ class TestCheckViews(TacticalTestCase):
|
|||||||
resp = self.client.post(url, invalid_payload, format="json")
|
resp = self.client.post(url, invalid_payload, format="json")
|
||||||
self.assertEqual(resp.status_code, 400)
|
self.assertEqual(resp.status_code, 400)
|
||||||
|
|
||||||
|
def test_add_cpuload_check(self):
|
||||||
|
url = "/checks/checks/"
|
||||||
|
agent = baker.make_recipe("agents.agent")
|
||||||
|
payload = {
|
||||||
|
"pk": agent.pk,
|
||||||
|
"check": {
|
||||||
|
"check_type": "cpuload",
|
||||||
|
"threshold": 66,
|
||||||
|
"fails_b4_alert": 9,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
resp = self.client.post(url, payload, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
payload["threshold"] = 87
|
||||||
|
resp = self.client.post(url, payload, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 400)
|
||||||
|
self.assertEqual(
|
||||||
|
resp.json()["non_field_errors"][0],
|
||||||
|
"A cpuload check for this agent already exists",
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_add_memory_check(self):
|
||||||
|
url = "/checks/checks/"
|
||||||
|
agent = baker.make_recipe("agents.agent")
|
||||||
|
payload = {
|
||||||
|
"pk": agent.pk,
|
||||||
|
"check": {
|
||||||
|
"check_type": "memory",
|
||||||
|
"threshold": 78,
|
||||||
|
"fails_b4_alert": 1,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
resp = self.client.post(url, payload, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
payload["threshold"] = 55
|
||||||
|
resp = self.client.post(url, payload, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 400)
|
||||||
|
self.assertEqual(
|
||||||
|
resp.json()["non_field_errors"][0],
|
||||||
|
"A memory check for this agent already exists",
|
||||||
|
)
|
||||||
|
|
||||||
def test_get_policy_disk_check(self):
|
def test_get_policy_disk_check(self):
|
||||||
# setup data
|
# setup data
|
||||||
policy = baker.make("automation.Policy")
|
policy = baker.make("automation.Policy")
|
||||||
@@ -134,3 +183,69 @@ class TestCheckViews(TacticalTestCase):
|
|||||||
self.assertEqual(resp.status_code, 200)
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
self.check_not_authenticated("patch", url_a)
|
self.check_not_authenticated("patch", url_a)
|
||||||
|
|
||||||
|
def test_get_check_history(self):
|
||||||
|
# setup data
|
||||||
|
agent = baker.make_recipe("agents.agent")
|
||||||
|
check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||||
|
baker.make("checks.CheckHistory", check_history=check, _quantity=30)
|
||||||
|
check_history_data = baker.make(
|
||||||
|
"checks.CheckHistory",
|
||||||
|
check_history=check,
|
||||||
|
_quantity=30,
|
||||||
|
)
|
||||||
|
|
||||||
|
# need to manually set the date back 35 days
|
||||||
|
for check_history in check_history_data:
|
||||||
|
check_history.x = djangotime.now() - djangotime.timedelta(days=35)
|
||||||
|
check_history.save()
|
||||||
|
|
||||||
|
# test invalid check pk
|
||||||
|
resp = self.client.patch("/checks/history/500/", format="json")
|
||||||
|
self.assertEqual(resp.status_code, 404)
|
||||||
|
|
||||||
|
url = f"/checks/history/{check.id}/"
|
||||||
|
|
||||||
|
# test with timeFilter last 30 days
|
||||||
|
data = {"timeFilter": 30}
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
self.assertEqual(len(resp.data), 30)
|
||||||
|
|
||||||
|
# test with timeFilter equal to 0
|
||||||
|
data = {"timeFilter": 0}
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
self.assertEqual(len(resp.data), 60)
|
||||||
|
|
||||||
|
self.check_not_authenticated("patch", url)
|
||||||
|
|
||||||
|
|
||||||
|
class TestCheckTasks(TacticalTestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.setup_coresettings()
|
||||||
|
|
||||||
|
def test_prune_check_history(self):
|
||||||
|
from .tasks import prune_check_history
|
||||||
|
|
||||||
|
# setup data
|
||||||
|
check = baker.make_recipe("checks.diskspace_check")
|
||||||
|
baker.make("checks.CheckHistory", check_history=check, _quantity=30)
|
||||||
|
check_history_data = baker.make(
|
||||||
|
"checks.CheckHistory",
|
||||||
|
check_history=check,
|
||||||
|
_quantity=30,
|
||||||
|
)
|
||||||
|
|
||||||
|
# need to manually set the date back 35 days
|
||||||
|
for check_history in check_history_data:
|
||||||
|
check_history.x = djangotime.now() - djangotime.timedelta(days=35)
|
||||||
|
check_history.save()
|
||||||
|
|
||||||
|
# prune data 30 days old
|
||||||
|
prune_check_history(30)
|
||||||
|
self.assertEqual(CheckHistory.objects.count(), 30)
|
||||||
|
|
||||||
|
# prune all Check history Data
|
||||||
|
prune_check_history(0)
|
||||||
|
self.assertEqual(CheckHistory.objects.count(), 0)
|
||||||
|
|||||||
@@ -7,4 +7,5 @@ urlpatterns = [
|
|||||||
path("<pk>/loadchecks/", views.load_checks),
|
path("<pk>/loadchecks/", views.load_checks),
|
||||||
path("getalldisks/", views.get_disks_for_policies),
|
path("getalldisks/", views.get_disks_for_policies),
|
||||||
path("runchecks/<pk>/", views.run_checks),
|
path("runchecks/<pk>/", views.run_checks),
|
||||||
|
path("history/<int:checkpk>/", views.CheckHistory.as_view()),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,6 +1,10 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
|
|
||||||
from django.shortcuts import get_object_or_404
|
from django.shortcuts import get_object_or_404
|
||||||
|
from django.db.models import Q
|
||||||
|
from django.utils import timezone as djangotime
|
||||||
|
|
||||||
|
from datetime import datetime as dt
|
||||||
|
|
||||||
from rest_framework.views import APIView
|
from rest_framework.views import APIView
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
@@ -13,7 +17,7 @@ from automation.models import Policy
|
|||||||
from .models import Check
|
from .models import Check
|
||||||
from scripts.models import Script
|
from scripts.models import Script
|
||||||
|
|
||||||
from .serializers import CheckSerializer
|
from .serializers import CheckSerializer, CheckHistorySerializer
|
||||||
|
|
||||||
|
|
||||||
from automation.tasks import (
|
from automation.tasks import (
|
||||||
@@ -135,6 +139,29 @@ class GetUpdateDeleteCheck(APIView):
|
|||||||
return Response(f"{check.readable_desc} was deleted!")
|
return Response(f"{check.readable_desc} was deleted!")
|
||||||
|
|
||||||
|
|
||||||
|
class CheckHistory(APIView):
|
||||||
|
def patch(self, request, checkpk):
|
||||||
|
check = get_object_or_404(Check, pk=checkpk)
|
||||||
|
|
||||||
|
timeFilter = Q()
|
||||||
|
|
||||||
|
if "timeFilter" in request.data:
|
||||||
|
if request.data["timeFilter"] != 0:
|
||||||
|
timeFilter = Q(
|
||||||
|
x__lte=djangotime.make_aware(dt.today()),
|
||||||
|
x__gt=djangotime.make_aware(dt.today())
|
||||||
|
- djangotime.timedelta(days=request.data["timeFilter"]),
|
||||||
|
)
|
||||||
|
|
||||||
|
check_history = check.check_history.filter(timeFilter).order_by("-x")
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
CheckHistorySerializer(
|
||||||
|
check_history, context={"timezone": check.agent.timezone}, many=True
|
||||||
|
).data
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@api_view()
|
@api_view()
|
||||||
def run_checks(request, pk):
|
def run_checks(request, pk):
|
||||||
agent = get_object_or_404(Agent, pk=pk)
|
agent = get_object_or_404(Agent, pk=pk)
|
||||||
|
|||||||
@@ -6,48 +6,48 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('clients', '0004_auto_20200821_2115'),
|
("clients", "0004_auto_20200821_2115"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='client',
|
model_name="client",
|
||||||
name='created_by',
|
name="created_by",
|
||||||
field=models.CharField(blank=True, max_length=100, null=True),
|
field=models.CharField(blank=True, max_length=100, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='client',
|
model_name="client",
|
||||||
name='created_time',
|
name="created_time",
|
||||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='client',
|
model_name="client",
|
||||||
name='modified_by',
|
name="modified_by",
|
||||||
field=models.CharField(blank=True, max_length=100, null=True),
|
field=models.CharField(blank=True, max_length=100, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='client',
|
model_name="client",
|
||||||
name='modified_time',
|
name="modified_time",
|
||||||
field=models.DateTimeField(auto_now=True, null=True),
|
field=models.DateTimeField(auto_now=True, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='site',
|
model_name="site",
|
||||||
name='created_by',
|
name="created_by",
|
||||||
field=models.CharField(blank=True, max_length=100, null=True),
|
field=models.CharField(blank=True, max_length=100, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='site',
|
model_name="site",
|
||||||
name='created_time',
|
name="created_time",
|
||||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='site',
|
model_name="site",
|
||||||
name='modified_by',
|
name="modified_by",
|
||||||
field=models.CharField(blank=True, max_length=100, null=True),
|
field=models.CharField(blank=True, max_length=100, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='site',
|
model_name="site",
|
||||||
name='modified_time',
|
name="modified_time",
|
||||||
field=models.DateTimeField(auto_now=True, null=True),
|
field=models.DateTimeField(auto_now=True, null=True),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -8,24 +8,67 @@ import uuid
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('knox', '0007_auto_20190111_0542'),
|
("knox", "0007_auto_20190111_0542"),
|
||||||
('clients', '0005_auto_20200922_1344'),
|
("clients", "0005_auto_20200922_1344"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.CreateModel(
|
migrations.CreateModel(
|
||||||
name='Deployment',
|
name="Deployment",
|
||||||
fields=[
|
fields=[
|
||||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
(
|
||||||
('uid', models.UUIDField(default=uuid.uuid4, editable=False)),
|
"id",
|
||||||
('mon_type', models.CharField(choices=[('server', 'Server'), ('workstation', 'Workstation')], default='server', max_length=255)),
|
models.AutoField(
|
||||||
('arch', models.CharField(choices=[('64', '64 bit'), ('32', '32 bit')], default='64', max_length=255)),
|
auto_created=True,
|
||||||
('expiry', models.DateTimeField(blank=True, null=True)),
|
primary_key=True,
|
||||||
('token_key', models.CharField(max_length=255)),
|
serialize=False,
|
||||||
('install_flags', models.JSONField(blank=True, null=True)),
|
verbose_name="ID",
|
||||||
('auth_token', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deploytokens', to='knox.authtoken')),
|
),
|
||||||
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deployclients', to='clients.client')),
|
),
|
||||||
('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deploysites', to='clients.site')),
|
("uid", models.UUIDField(default=uuid.uuid4, editable=False)),
|
||||||
|
(
|
||||||
|
"mon_type",
|
||||||
|
models.CharField(
|
||||||
|
choices=[("server", "Server"), ("workstation", "Workstation")],
|
||||||
|
default="server",
|
||||||
|
max_length=255,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"arch",
|
||||||
|
models.CharField(
|
||||||
|
choices=[("64", "64 bit"), ("32", "32 bit")],
|
||||||
|
default="64",
|
||||||
|
max_length=255,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("expiry", models.DateTimeField(blank=True, null=True)),
|
||||||
|
("token_key", models.CharField(max_length=255)),
|
||||||
|
("install_flags", models.JSONField(blank=True, null=True)),
|
||||||
|
(
|
||||||
|
"auth_token",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="deploytokens",
|
||||||
|
to="knox.authtoken",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"client",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="deployclients",
|
||||||
|
to="clients.client",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"site",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="deploysites",
|
||||||
|
to="clients.site",
|
||||||
|
),
|
||||||
|
),
|
||||||
],
|
],
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -6,18 +6,18 @@ from django.db import migrations
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('clients', '0006_deployment'),
|
("clients", "0006_deployment"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.RenameField(
|
migrations.RenameField(
|
||||||
model_name='client',
|
model_name="client",
|
||||||
old_name='client',
|
old_name="client",
|
||||||
new_name='name',
|
new_name="name",
|
||||||
),
|
),
|
||||||
migrations.RenameField(
|
migrations.RenameField(
|
||||||
model_name='site',
|
model_name="site",
|
||||||
old_name='site',
|
old_name="site",
|
||||||
new_name='name',
|
new_name="name",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -6,16 +6,16 @@ from django.db import migrations
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('clients', '0007_auto_20201102_1920'),
|
("clients", "0007_auto_20201102_1920"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AlterModelOptions(
|
migrations.AlterModelOptions(
|
||||||
name='client',
|
name="client",
|
||||||
options={'ordering': ('name',)},
|
options={"ordering": ("name",)},
|
||||||
),
|
),
|
||||||
migrations.AlterModelOptions(
|
migrations.AlterModelOptions(
|
||||||
name='site',
|
name="site",
|
||||||
options={'ordering': ('name',)},
|
options={"ordering": ("name",)},
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -38,7 +38,6 @@ class Client(BaseAuditModel):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def has_failing_checks(self):
|
def has_failing_checks(self):
|
||||||
|
|
||||||
agents = (
|
agents = (
|
||||||
Agent.objects.only(
|
Agent.objects.only(
|
||||||
"pk",
|
"pk",
|
||||||
@@ -50,14 +49,17 @@ class Client(BaseAuditModel):
|
|||||||
.filter(site__client=self)
|
.filter(site__client=self)
|
||||||
.prefetch_related("agentchecks")
|
.prefetch_related("agentchecks")
|
||||||
)
|
)
|
||||||
|
|
||||||
|
failing = 0
|
||||||
for agent in agents:
|
for agent in agents:
|
||||||
if agent.checks["has_failing_checks"]:
|
if agent.checks["has_failing_checks"]:
|
||||||
return True
|
failing += 1
|
||||||
|
|
||||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||||
return agent.status == "overdue"
|
if agent.status == "overdue":
|
||||||
|
failing += 1
|
||||||
|
|
||||||
return False
|
return failing > 0
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def serialize(client):
|
def serialize(client):
|
||||||
@@ -98,7 +100,6 @@ class Site(BaseAuditModel):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def has_failing_checks(self):
|
def has_failing_checks(self):
|
||||||
|
|
||||||
agents = (
|
agents = (
|
||||||
Agent.objects.only(
|
Agent.objects.only(
|
||||||
"pk",
|
"pk",
|
||||||
@@ -110,14 +111,17 @@ class Site(BaseAuditModel):
|
|||||||
.filter(site=self)
|
.filter(site=self)
|
||||||
.prefetch_related("agentchecks")
|
.prefetch_related("agentchecks")
|
||||||
)
|
)
|
||||||
|
|
||||||
|
failing = 0
|
||||||
for agent in agents:
|
for agent in agents:
|
||||||
if agent.checks["has_failing_checks"]:
|
if agent.checks["has_failing_checks"]:
|
||||||
return True
|
failing += 1
|
||||||
|
|
||||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||||
return agent.status == "overdue"
|
if agent.status == "overdue":
|
||||||
|
failing += 1
|
||||||
|
|
||||||
return False
|
return failing > 0
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def serialize(site):
|
def serialize(site):
|
||||||
|
|||||||
@@ -192,7 +192,7 @@ class GenerateAgent(APIView):
|
|||||||
if not os.path.exists(go_bin):
|
if not os.path.exists(go_bin):
|
||||||
return notify_error("Missing golang")
|
return notify_error("Missing golang")
|
||||||
|
|
||||||
api = f"{request.scheme}://{request.get_host()}"
|
api = f"https://{request.get_host()}"
|
||||||
inno = (
|
inno = (
|
||||||
f"winagent-v{settings.LATEST_AGENT_VER}.exe"
|
f"winagent-v{settings.LATEST_AGENT_VER}.exe"
|
||||||
if d.arch == "64"
|
if d.arch == "64"
|
||||||
|
|||||||
@@ -58,6 +58,7 @@ func main() {
|
|||||||
debugLog := flag.String("log", "", "Verbose output")
|
debugLog := flag.String("log", "", "Verbose output")
|
||||||
localMesh := flag.String("local-mesh", "", "Use local mesh agent")
|
localMesh := flag.String("local-mesh", "", "Use local mesh agent")
|
||||||
noSalt := flag.Bool("nosalt", false, "Does not install salt")
|
noSalt := flag.Bool("nosalt", false, "Does not install salt")
|
||||||
|
silent := flag.Bool("silent", false, "Do not popup any message boxes during installation")
|
||||||
cert := flag.String("cert", "", "Path to ca.pem")
|
cert := flag.String("cert", "", "Path to ca.pem")
|
||||||
timeout := flag.String("timeout", "", "Timeout for subprocess calls")
|
timeout := flag.String("timeout", "", "Timeout for subprocess calls")
|
||||||
flag.Parse()
|
flag.Parse()
|
||||||
@@ -78,7 +79,11 @@ func main() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if debug {
|
if debug {
|
||||||
cmdArgs = append(cmdArgs, "--log", "DEBUG")
|
cmdArgs = append(cmdArgs, "-log", "debug")
|
||||||
|
}
|
||||||
|
|
||||||
|
if *silent {
|
||||||
|
cmdArgs = append(cmdArgs, "-silent")
|
||||||
}
|
}
|
||||||
|
|
||||||
if *noSalt {
|
if *noSalt {
|
||||||
@@ -86,27 +91,27 @@ func main() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if len(strings.TrimSpace(*localMesh)) != 0 {
|
if len(strings.TrimSpace(*localMesh)) != 0 {
|
||||||
cmdArgs = append(cmdArgs, "--local-mesh", *localMesh)
|
cmdArgs = append(cmdArgs, "-local-mesh", *localMesh)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(strings.TrimSpace(*cert)) != 0 {
|
if len(strings.TrimSpace(*cert)) != 0 {
|
||||||
cmdArgs = append(cmdArgs, "--cert", *cert)
|
cmdArgs = append(cmdArgs, "-cert", *cert)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(strings.TrimSpace(*timeout)) != 0 {
|
if len(strings.TrimSpace(*timeout)) != 0 {
|
||||||
cmdArgs = append(cmdArgs, "--timeout", *timeout)
|
cmdArgs = append(cmdArgs, "-timeout", *timeout)
|
||||||
}
|
}
|
||||||
|
|
||||||
if Rdp == "1" {
|
if Rdp == "1" {
|
||||||
cmdArgs = append(cmdArgs, "--rdp")
|
cmdArgs = append(cmdArgs, "-rdp")
|
||||||
}
|
}
|
||||||
|
|
||||||
if Ping == "1" {
|
if Ping == "1" {
|
||||||
cmdArgs = append(cmdArgs, "--ping")
|
cmdArgs = append(cmdArgs, "-ping")
|
||||||
}
|
}
|
||||||
|
|
||||||
if Power == "1" {
|
if Power == "1" {
|
||||||
cmdArgs = append(cmdArgs, "--power")
|
cmdArgs = append(cmdArgs, "-power")
|
||||||
}
|
}
|
||||||
|
|
||||||
if debug {
|
if debug {
|
||||||
|
|||||||
@@ -6,13 +6,13 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('core', '0008_auto_20200910_1434'),
|
("core", "0008_auto_20200910_1434"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='coresettings',
|
model_name="coresettings",
|
||||||
name='agent_auto_update',
|
name="agent_auto_update",
|
||||||
field=models.BooleanField(default=True),
|
field=models.BooleanField(default=True),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -6,28 +6,28 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('core', '0009_coresettings_agent_auto_update'),
|
("core", "0009_coresettings_agent_auto_update"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='coresettings',
|
model_name="coresettings",
|
||||||
name='created_by',
|
name="created_by",
|
||||||
field=models.CharField(blank=True, max_length=100, null=True),
|
field=models.CharField(blank=True, max_length=100, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='coresettings',
|
model_name="coresettings",
|
||||||
name='created_time',
|
name="created_time",
|
||||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='coresettings',
|
model_name="coresettings",
|
||||||
name='modified_by',
|
name="modified_by",
|
||||||
field=models.CharField(blank=True, max_length=100, null=True),
|
field=models.CharField(blank=True, max_length=100, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='coresettings',
|
model_name="coresettings",
|
||||||
name='modified_time',
|
name="modified_time",
|
||||||
field=models.DateTimeField(auto_now=True, null=True),
|
field=models.DateTimeField(auto_now=True, null=True),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -7,28 +7,34 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('core', '0010_auto_20201002_1257'),
|
("core", "0010_auto_20201002_1257"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='coresettings',
|
model_name="coresettings",
|
||||||
name='sms_alert_recipients',
|
name="sms_alert_recipients",
|
||||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None),
|
field=django.contrib.postgres.fields.ArrayField(
|
||||||
|
base_field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
|
blank=True,
|
||||||
|
default=list,
|
||||||
|
null=True,
|
||||||
|
size=None,
|
||||||
|
),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='coresettings',
|
model_name="coresettings",
|
||||||
name='twilio_account_sid',
|
name="twilio_account_sid",
|
||||||
field=models.CharField(blank=True, max_length=255, null=True),
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='coresettings',
|
model_name="coresettings",
|
||||||
name='twilio_auth_token',
|
name="twilio_auth_token",
|
||||||
field=models.CharField(blank=True, max_length=255, null=True),
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='coresettings',
|
model_name="coresettings",
|
||||||
name='twilio_number',
|
name="twilio_number",
|
||||||
field=models.CharField(blank=True, max_length=255, null=True),
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2021-01-10 18:08
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("core", "0011_auto_20201026_0719"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="coresettings",
|
||||||
|
name="check_history_prune_days",
|
||||||
|
field=models.PositiveIntegerField(default=30),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -49,6 +49,8 @@ class CoreSettings(BaseAuditModel):
|
|||||||
default_time_zone = models.CharField(
|
default_time_zone = models.CharField(
|
||||||
max_length=255, choices=TZ_CHOICES, default="America/Los_Angeles"
|
max_length=255, choices=TZ_CHOICES, default="America/Los_Angeles"
|
||||||
)
|
)
|
||||||
|
# removes check history older than days
|
||||||
|
check_history_prune_days = models.PositiveIntegerField(default=30)
|
||||||
mesh_token = models.CharField(max_length=255, null=True, blank=True, default="")
|
mesh_token = models.CharField(max_length=255, null=True, blank=True, default="")
|
||||||
mesh_username = models.CharField(max_length=255, null=True, blank=True, default="")
|
mesh_username = models.CharField(max_length=255, null=True, blank=True, default="")
|
||||||
mesh_site = models.CharField(max_length=255, null=True, blank=True, default="")
|
mesh_site = models.CharField(max_length=255, null=True, blank=True, default="")
|
||||||
|
|||||||
@@ -4,8 +4,10 @@ from loguru import logger
|
|||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.utils import timezone as djangotime
|
from django.utils import timezone as djangotime
|
||||||
from tacticalrmm.celery import app
|
from tacticalrmm.celery import app
|
||||||
|
from core.models import CoreSettings
|
||||||
from autotasks.models import AutomatedTask
|
from autotasks.models import AutomatedTask
|
||||||
from autotasks.tasks import delete_win_task_schedule
|
from autotasks.tasks import delete_win_task_schedule
|
||||||
|
from checks.tasks import prune_check_history
|
||||||
|
|
||||||
logger.configure(**settings.LOG_CONFIG)
|
logger.configure(**settings.LOG_CONFIG)
|
||||||
|
|
||||||
@@ -25,3 +27,7 @@ def core_maintenance_tasks():
|
|||||||
|
|
||||||
if now > task_time_utc:
|
if now > task_time_utc:
|
||||||
delete_win_task_schedule.delay(task.pk)
|
delete_win_task_schedule.delay(task.pk)
|
||||||
|
|
||||||
|
# remove old CheckHistory data
|
||||||
|
older_than = CoreSettings.objects.first().check_history_prune_days
|
||||||
|
prune_check_history.delay(older_than)
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
from tacticalrmm.test import TacticalTestCase
|
from tacticalrmm.test import TacticalTestCase
|
||||||
from core.tasks import core_maintenance_tasks
|
from core.tasks import core_maintenance_tasks
|
||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
|
from core.models import CoreSettings
|
||||||
from model_bakery import baker, seq
|
from model_bakery import baker, seq
|
||||||
|
|
||||||
|
|
||||||
@@ -34,6 +35,54 @@ class TestCoreTasks(TacticalTestCase):
|
|||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
self.check_not_authenticated("get", url)
|
||||||
|
|
||||||
|
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||||
|
def test_edit_coresettings(self, generate_all_agent_checks_task):
|
||||||
|
url = "/core/editsettings/"
|
||||||
|
|
||||||
|
# setup
|
||||||
|
policies = baker.make("Policy", _quantity=2)
|
||||||
|
# test normal request
|
||||||
|
data = {
|
||||||
|
"smtp_from_email": "newexample@example.com",
|
||||||
|
"mesh_token": "New_Mesh_Token",
|
||||||
|
}
|
||||||
|
r = self.client.patch(url, data)
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual(
|
||||||
|
CoreSettings.objects.first().smtp_from_email, data["smtp_from_email"]
|
||||||
|
)
|
||||||
|
self.assertEqual(CoreSettings.objects.first().mesh_token, data["mesh_token"])
|
||||||
|
|
||||||
|
generate_all_agent_checks_task.assert_not_called()
|
||||||
|
|
||||||
|
# test adding policy
|
||||||
|
data = {
|
||||||
|
"workstation_policy": policies[0].id,
|
||||||
|
"server_policy": policies[1].id,
|
||||||
|
}
|
||||||
|
r = self.client.patch(url, data)
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual(CoreSettings.objects.first().server_policy.id, policies[1].id)
|
||||||
|
self.assertEqual(
|
||||||
|
CoreSettings.objects.first().workstation_policy.id, policies[0].id
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(generate_all_agent_checks_task.call_count, 2)
|
||||||
|
|
||||||
|
generate_all_agent_checks_task.reset_mock()
|
||||||
|
|
||||||
|
# test remove policy
|
||||||
|
data = {
|
||||||
|
"workstation_policy": "",
|
||||||
|
}
|
||||||
|
r = self.client.patch(url, data)
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual(CoreSettings.objects.first().workstation_policy, None)
|
||||||
|
|
||||||
|
self.assertEqual(generate_all_agent_checks_task.call_count, 1)
|
||||||
|
|
||||||
|
self.check_not_authenticated("patch", url)
|
||||||
|
|
||||||
@patch("autotasks.tasks.remove_orphaned_win_tasks.delay")
|
@patch("autotasks.tasks.remove_orphaned_win_tasks.delay")
|
||||||
def test_ui_maintenance_actions(self, remove_orphaned_win_tasks):
|
def test_ui_maintenance_actions(self, remove_orphaned_win_tasks):
|
||||||
url = "/core/servermaintenance/"
|
url = "/core/servermaintenance/"
|
||||||
|
|||||||
@@ -42,18 +42,20 @@ def get_core_settings(request):
|
|||||||
|
|
||||||
@api_view(["PATCH"])
|
@api_view(["PATCH"])
|
||||||
def edit_settings(request):
|
def edit_settings(request):
|
||||||
settings = CoreSettings.objects.first()
|
coresettings = CoreSettings.objects.first()
|
||||||
serializer = CoreSettingsSerializer(instance=settings, data=request.data)
|
old_server_policy = coresettings.server_policy
|
||||||
|
old_workstation_policy = coresettings.workstation_policy
|
||||||
|
serializer = CoreSettingsSerializer(instance=coresettings, data=request.data)
|
||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
new_settings = serializer.save()
|
new_settings = serializer.save()
|
||||||
|
|
||||||
# check if default policies changed
|
# check if default policies changed
|
||||||
if settings.server_policy != new_settings.server_policy:
|
if old_server_policy != new_settings.server_policy:
|
||||||
generate_all_agent_checks_task.delay(
|
generate_all_agent_checks_task.delay(
|
||||||
mon_type="server", clear=True, create_tasks=True
|
mon_type="server", clear=True, create_tasks=True
|
||||||
)
|
)
|
||||||
|
|
||||||
if settings.workstation_policy != new_settings.workstation_policy:
|
if old_workstation_policy != new_settings.workstation_policy:
|
||||||
generate_all_agent_checks_task.delay(
|
generate_all_agent_checks_task.delay(
|
||||||
mon_type="workstation", clear=True, create_tasks=True
|
mon_type="workstation", clear=True, create_tasks=True
|
||||||
)
|
)
|
||||||
@@ -69,7 +71,12 @@ def version(request):
|
|||||||
@api_view()
|
@api_view()
|
||||||
def dashboard_info(request):
|
def dashboard_info(request):
|
||||||
return Response(
|
return Response(
|
||||||
{"trmm_version": settings.TRMM_VERSION, "dark_mode": request.user.dark_mode}
|
{
|
||||||
|
"trmm_version": settings.TRMM_VERSION,
|
||||||
|
"dark_mode": request.user.dark_mode,
|
||||||
|
"show_community_scripts": request.user.show_community_scripts,
|
||||||
|
"dbl_click_action": request.user.agent_dblclick_action,
|
||||||
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -6,13 +6,28 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('logs', '0007_auditlog_debug_info'),
|
("logs", "0007_auditlog_debug_info"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name='auditlog',
|
model_name="auditlog",
|
||||||
name='action',
|
name="action",
|
||||||
field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command')], max_length=100),
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("login", "User Login"),
|
||||||
|
("failed_login", "Failed User Login"),
|
||||||
|
("delete", "Delete Object"),
|
||||||
|
("modify", "Modify Object"),
|
||||||
|
("add", "Add Object"),
|
||||||
|
("view", "View Object"),
|
||||||
|
("check_run", "Check Run"),
|
||||||
|
("task_run", "Task Run"),
|
||||||
|
("remote_session", "Remote Session"),
|
||||||
|
("execute_script", "Execute Script"),
|
||||||
|
("execute_command", "Execute Command"),
|
||||||
|
],
|
||||||
|
max_length=100,
|
||||||
|
),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -6,13 +6,29 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('logs', '0008_auto_20201110_1431'),
|
("logs", "0008_auto_20201110_1431"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name='auditlog',
|
model_name="auditlog",
|
||||||
name='action',
|
name="action",
|
||||||
field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('agent_install', 'Agent Install'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command')], max_length=100),
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("login", "User Login"),
|
||||||
|
("failed_login", "Failed User Login"),
|
||||||
|
("delete", "Delete Object"),
|
||||||
|
("modify", "Modify Object"),
|
||||||
|
("add", "Add Object"),
|
||||||
|
("view", "View Object"),
|
||||||
|
("check_run", "Check Run"),
|
||||||
|
("task_run", "Task Run"),
|
||||||
|
("agent_install", "Agent Install"),
|
||||||
|
("remote_session", "Remote Session"),
|
||||||
|
("execute_script", "Execute Script"),
|
||||||
|
("execute_command", "Execute Command"),
|
||||||
|
],
|
||||||
|
max_length=100,
|
||||||
|
),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -6,18 +6,50 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('logs', '0009_auto_20201110_1431'),
|
("logs", "0009_auto_20201110_1431"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name='auditlog',
|
model_name="auditlog",
|
||||||
name='action',
|
name="action",
|
||||||
field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('agent_install', 'Agent Install'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command'), ('bulk_action', 'Bulk Action')], max_length=100),
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("login", "User Login"),
|
||||||
|
("failed_login", "Failed User Login"),
|
||||||
|
("delete", "Delete Object"),
|
||||||
|
("modify", "Modify Object"),
|
||||||
|
("add", "Add Object"),
|
||||||
|
("view", "View Object"),
|
||||||
|
("check_run", "Check Run"),
|
||||||
|
("task_run", "Task Run"),
|
||||||
|
("agent_install", "Agent Install"),
|
||||||
|
("remote_session", "Remote Session"),
|
||||||
|
("execute_script", "Execute Script"),
|
||||||
|
("execute_command", "Execute Command"),
|
||||||
|
("bulk_action", "Bulk Action"),
|
||||||
|
],
|
||||||
|
max_length=100,
|
||||||
|
),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name='auditlog',
|
model_name="auditlog",
|
||||||
name='object_type',
|
name="object_type",
|
||||||
field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk')], max_length=100),
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("user", "User"),
|
||||||
|
("script", "Script"),
|
||||||
|
("agent", "Agent"),
|
||||||
|
("policy", "Policy"),
|
||||||
|
("winupdatepolicy", "Patch Policy"),
|
||||||
|
("client", "Client"),
|
||||||
|
("site", "Site"),
|
||||||
|
("check", "Check"),
|
||||||
|
("automatedtask", "Automated Task"),
|
||||||
|
("coresettings", "Core Settings"),
|
||||||
|
("bulk", "Bulk"),
|
||||||
|
],
|
||||||
|
max_length=100,
|
||||||
|
),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -6,13 +6,22 @@ from django.db import migrations, models
|
|||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('logs', '0010_auto_20201110_2238'),
|
("logs", "0010_auto_20201110_2238"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name='pendingaction',
|
model_name="pendingaction",
|
||||||
name='action_type',
|
name="action_type",
|
||||||
field=models.CharField(blank=True, choices=[('schedreboot', 'Scheduled Reboot'), ('taskaction', 'Scheduled Task Action'), ('agentupdate', 'Agent Update')], max_length=255, null=True),
|
field=models.CharField(
|
||||||
|
blank=True,
|
||||||
|
choices=[
|
||||||
|
("schedreboot", "Scheduled Reboot"),
|
||||||
|
("taskaction", "Scheduled Task Action"),
|
||||||
|
("agentupdate", "Agent Update"),
|
||||||
|
],
|
||||||
|
max_length=255,
|
||||||
|
null=True,
|
||||||
|
),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
0
api/tacticalrmm/natsapi/__init__.py
Normal file
0
api/tacticalrmm/natsapi/__init__.py
Normal file
5
api/tacticalrmm/natsapi/apps.py
Normal file
5
api/tacticalrmm/natsapi/apps.py
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
from django.apps import AppConfig
|
||||||
|
|
||||||
|
|
||||||
|
class NatsapiConfig(AppConfig):
|
||||||
|
name = "natsapi"
|
||||||
0
api/tacticalrmm/natsapi/migrations/__init__.py
Normal file
0
api/tacticalrmm/natsapi/migrations/__init__.py
Normal file
8
api/tacticalrmm/natsapi/urls.py
Normal file
8
api/tacticalrmm/natsapi/urls.py
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
from django.urls import path
|
||||||
|
from . import views
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
path("natsinfo/", views.nats_info),
|
||||||
|
path("checkin/", views.NatsCheckIn.as_view()),
|
||||||
|
path("syncmesh/", views.SyncMeshNodeID.as_view()),
|
||||||
|
]
|
||||||
99
api/tacticalrmm/natsapi/views.py
Normal file
99
api/tacticalrmm/natsapi/views.py
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
from django.utils import timezone as djangotime
|
||||||
|
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
from rest_framework.decorators import (
|
||||||
|
api_view,
|
||||||
|
permission_classes,
|
||||||
|
authentication_classes,
|
||||||
|
)
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.shortcuts import get_object_or_404
|
||||||
|
|
||||||
|
from agents.models import Agent
|
||||||
|
from software.models import InstalledSoftware
|
||||||
|
from checks.utils import bytes2human
|
||||||
|
from agents.serializers import WinAgentSerializer
|
||||||
|
|
||||||
|
from tacticalrmm.utils import notify_error, filter_software, SoftwareList
|
||||||
|
|
||||||
|
|
||||||
|
@api_view()
|
||||||
|
@permission_classes([])
|
||||||
|
@authentication_classes([])
|
||||||
|
def nats_info(request):
|
||||||
|
return Response({"user": "tacticalrmm", "password": settings.SECRET_KEY})
|
||||||
|
|
||||||
|
|
||||||
|
class NatsCheckIn(APIView):
|
||||||
|
|
||||||
|
authentication_classes = []
|
||||||
|
permission_classes = []
|
||||||
|
|
||||||
|
def patch(self, request):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
agent.version = request.data["version"]
|
||||||
|
agent.last_seen = djangotime.now()
|
||||||
|
agent.save(update_fields=["version", "last_seen"])
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
def put(self, request):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||||
|
|
||||||
|
if request.data["func"] == "disks":
|
||||||
|
disks = request.data["disks"]
|
||||||
|
new = []
|
||||||
|
for disk in disks:
|
||||||
|
tmp = {}
|
||||||
|
for _, _ in disk.items():
|
||||||
|
tmp["device"] = disk["device"]
|
||||||
|
tmp["fstype"] = disk["fstype"]
|
||||||
|
tmp["total"] = bytes2human(disk["total"])
|
||||||
|
tmp["used"] = bytes2human(disk["used"])
|
||||||
|
tmp["free"] = bytes2human(disk["free"])
|
||||||
|
tmp["percent"] = int(disk["percent"])
|
||||||
|
new.append(tmp)
|
||||||
|
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save(disks=new)
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
if request.data["func"] == "loggedonuser":
|
||||||
|
if request.data["logged_in_username"] != "None":
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save(last_logged_in_user=request.data["logged_in_username"])
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
if request.data["func"] == "software":
|
||||||
|
raw: SoftwareList = request.data["software"]
|
||||||
|
if not isinstance(raw, list):
|
||||||
|
return notify_error("err")
|
||||||
|
|
||||||
|
sw = filter_software(raw)
|
||||||
|
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
||||||
|
InstalledSoftware(agent=agent, software=sw).save()
|
||||||
|
else:
|
||||||
|
s = agent.installedsoftware_set.first()
|
||||||
|
s.software = sw
|
||||||
|
s.save(update_fields=["software"])
|
||||||
|
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save()
|
||||||
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
|
class SyncMeshNodeID(APIView):
|
||||||
|
authentication_classes = []
|
||||||
|
permission_classes = []
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||||
|
if agent.mesh_node_id != request.data["nodeid"]:
|
||||||
|
agent.mesh_node_id = request.data["nodeid"]
|
||||||
|
agent.save(update_fields=["mesh_node_id"])
|
||||||
|
|
||||||
|
return Response("ok")
|
||||||
@@ -4,35 +4,35 @@ asyncio-nats-client==0.11.4
|
|||||||
billiard==3.6.3.0
|
billiard==3.6.3.0
|
||||||
celery==4.4.6
|
celery==4.4.6
|
||||||
certifi==2020.12.5
|
certifi==2020.12.5
|
||||||
cffi==1.14.3
|
cffi==1.14.4
|
||||||
chardet==3.0.4
|
chardet==4.0.0
|
||||||
cryptography==3.2.1
|
cryptography==3.3.1
|
||||||
decorator==4.4.2
|
decorator==4.4.2
|
||||||
Django==3.1.4
|
Django==3.1.5
|
||||||
django-cors-headers==3.5.0
|
django-cors-headers==3.6.0
|
||||||
django-rest-knox==4.1.0
|
django-rest-knox==4.1.0
|
||||||
djangorestframework==3.12.2
|
djangorestframework==3.12.2
|
||||||
future==0.18.2
|
future==0.18.2
|
||||||
idna==2.10
|
idna==2.10
|
||||||
kombu==4.6.11
|
kombu==4.6.11
|
||||||
loguru==0.5.3
|
loguru==0.5.3
|
||||||
msgpack==1.0.0
|
msgpack==1.0.2
|
||||||
packaging==20.4
|
packaging==20.8
|
||||||
psycopg2-binary==2.8.6
|
psycopg2-binary==2.8.6
|
||||||
pycparser==2.20
|
pycparser==2.20
|
||||||
pycryptodome==3.9.9
|
pycryptodome==3.9.9
|
||||||
pyotp==2.4.1
|
pyotp==2.4.1
|
||||||
pyparsing==2.4.7
|
pyparsing==2.4.7
|
||||||
pytz==2020.4
|
pytz==2020.5
|
||||||
qrcode==6.1
|
qrcode==6.1
|
||||||
redis==3.5.3
|
redis==3.5.3
|
||||||
requests==2.25.0
|
requests==2.25.1
|
||||||
six==1.15.0
|
six==1.15.0
|
||||||
sqlparse==0.4.1
|
sqlparse==0.4.1
|
||||||
twilio==6.49.0
|
twilio==6.51.0
|
||||||
urllib3==1.26.2
|
urllib3==1.26.2
|
||||||
uWSGI==2.0.19.1
|
uWSGI==2.0.19.1
|
||||||
validators==0.18.1
|
validators==0.18.2
|
||||||
vine==1.3.0
|
vine==1.3.0
|
||||||
websockets==8.1
|
websockets==8.1
|
||||||
zipp==3.4.0
|
zipp==3.4.0
|
||||||
|
|||||||
@@ -6,8 +6,5 @@ script = Recipe(
|
|||||||
name="Test Script",
|
name="Test Script",
|
||||||
description="Test Desc",
|
description="Test Desc",
|
||||||
shell="cmd",
|
shell="cmd",
|
||||||
filename="test.bat",
|
|
||||||
script_type="userdefined",
|
script_type="userdefined",
|
||||||
)
|
)
|
||||||
|
|
||||||
builtin_script = script.extend(script_type="builtin")
|
|
||||||
|
|||||||
@@ -96,5 +96,103 @@
|
|||||||
"name": "Check BIOS Information",
|
"name": "Check BIOS Information",
|
||||||
"description": "Retreives and reports on BIOS make, version, and date .",
|
"description": "Retreives and reports on BIOS make, version, and date .",
|
||||||
"shell": "powershell"
|
"shell": "powershell"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "ResetHighPerformancePowerProfiletoDefaults.ps1",
|
||||||
|
"submittedBy": "https://github.com/azulskyknight",
|
||||||
|
"name": "Reset High Perf Power Profile",
|
||||||
|
"description": "Resets monitor, disk, standby, and hibernate timers in the default High Performance power profile to their default values. It also re-indexes the AC and DC power profiles into their default order.",
|
||||||
|
"shell": "powershell"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "SetHighPerformancePowerProfile.ps1",
|
||||||
|
"submittedBy": "https://github.com/azulskyknight",
|
||||||
|
"name": "Set High Perf Power Profile",
|
||||||
|
"description": "Sets the High Performance Power profile to the active power profile. Use this to keep machines from falling asleep.",
|
||||||
|
"shell": "powershell"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "Windows10Upgrade.ps1",
|
||||||
|
"submittedBy": "https://github.com/RVL-Solutions and https://github.com/darimm",
|
||||||
|
"name": "Windows 10 Upgrade",
|
||||||
|
"description": "Forces an upgrade to the latest release of Windows 10.",
|
||||||
|
"shell": "powershell"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "DiskStatus.ps1",
|
||||||
|
"submittedBy": "https://github.com/dinger1986",
|
||||||
|
"name": "Check Disks",
|
||||||
|
"description": "Checks local disks for errors reported in event viewer within the last 24 hours",
|
||||||
|
"shell": "powershell"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "DuplicatiStatus.ps1",
|
||||||
|
"submittedBy": "https://github.com/dinger1986",
|
||||||
|
"name": "Check Duplicati",
|
||||||
|
"description": "Checks Duplicati Backup is running properly over the last 24 hours",
|
||||||
|
"shell": "powershell"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "EnableDefender.ps1",
|
||||||
|
"submittedBy": "https://github.com/dinger1986",
|
||||||
|
"name": "Enable Windows Defender",
|
||||||
|
"description": "Enables Windows Defender and sets preferences",
|
||||||
|
"shell": "powershell"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "OpenSSHServerInstall.ps1",
|
||||||
|
"submittedBy": "https://github.com/dinger1986",
|
||||||
|
"name": "Install SSH",
|
||||||
|
"description": "Installs and enabled OpenSSH Server",
|
||||||
|
"shell": "powershell"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "RDP_enable.bat",
|
||||||
|
"submittedBy": "https://github.com/dinger1986",
|
||||||
|
"name": "Enable RDP",
|
||||||
|
"description": "Enables RDP",
|
||||||
|
"shell": "cmd"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "Speedtest.ps1",
|
||||||
|
"submittedBy": "https://github.com/dinger1986",
|
||||||
|
"name": "PS Speed Test",
|
||||||
|
"description": "Powershell speed test (win 10 or server2016+)",
|
||||||
|
"shell": "powershell"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "SyncTime.bat",
|
||||||
|
"submittedBy": "https://github.com/dinger1986",
|
||||||
|
"name": "Sync DC Time",
|
||||||
|
"description": "Syncs time with domain controller",
|
||||||
|
"shell": "cmd"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "WinDefenderClearLogs.ps1",
|
||||||
|
"submittedBy": "https://github.com/dinger1986",
|
||||||
|
"name": "Clear Defender Logs",
|
||||||
|
"description": "Clears Windows Defender Logs",
|
||||||
|
"shell": "powershell"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "WinDefenderStatus.ps1",
|
||||||
|
"submittedBy": "https://github.com/dinger1986",
|
||||||
|
"name": "Defender Status",
|
||||||
|
"description": "This will check for Malware, Antispyware, that Windows Defender is Healthy, last scan etc within the last 24 hours",
|
||||||
|
"shell": "powershell"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "disable_FastStartup.bat",
|
||||||
|
"submittedBy": "https://github.com/dinger1986",
|
||||||
|
"name": "Disable Fast Startup",
|
||||||
|
"description": "Disables Faststartup on Windows 10",
|
||||||
|
"shell": "cmd"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "updatetacticalexclusion.ps1",
|
||||||
|
"submittedBy": "https://github.com/dinger1986",
|
||||||
|
"name": "TRMM Defender Exclusions",
|
||||||
|
"description": "Windows Defender Exclusions for Tactical RMM",
|
||||||
|
"shell": "cmd"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
@@ -0,0 +1,28 @@
|
|||||||
|
# Generated by Django 3.1.3 on 2020-12-07 15:58
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("scripts", "0003_auto_20200922_1344"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="script",
|
||||||
|
name="category",
|
||||||
|
field=models.CharField(blank=True, max_length=100, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="script",
|
||||||
|
name="favorite",
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="script",
|
||||||
|
name="script_base64",
|
||||||
|
field=models.TextField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.3 on 2020-12-07 16:06
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("scripts", "0004_auto_20201207_1558"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RenameField(
|
||||||
|
model_name="script",
|
||||||
|
old_name="script_base64",
|
||||||
|
new_name="code_base64",
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,42 @@
|
|||||||
|
# Generated by Django 3.1.4 on 2020-12-10 21:45
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
from django.conf import settings
|
||||||
|
import os
|
||||||
|
import base64
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
def move_scripts_to_db(apps, schema_editor):
|
||||||
|
print("")
|
||||||
|
Script = apps.get_model("scripts", "Script")
|
||||||
|
for script in Script.objects.all():
|
||||||
|
if not script.script_type == "builtin":
|
||||||
|
|
||||||
|
if script.filename:
|
||||||
|
filepath = f"{settings.SCRIPTS_DIR}/userdefined/{script.filename}"
|
||||||
|
else:
|
||||||
|
print(f"No filename on script found. Skipping")
|
||||||
|
continue
|
||||||
|
|
||||||
|
# test if file exists
|
||||||
|
if os.path.exists(filepath):
|
||||||
|
print(f"Found script {script.name}. Importing code.")
|
||||||
|
|
||||||
|
with open(filepath, "rb") as f:
|
||||||
|
script_bytes = f.read().decode("utf-8").encode("ascii", "ignore")
|
||||||
|
script.code_base64 = base64.b64encode(script_bytes).decode("ascii")
|
||||||
|
script.save(update_fields=["code_base64"])
|
||||||
|
else:
|
||||||
|
print(
|
||||||
|
f"Script file {script.name} was not found on the disk. You will need to edit the script in the UI"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("scripts", "0005_auto_20201207_1606"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [migrations.RunPython(move_scripts_to_db, migrations.RunPython.noop)]
|
||||||
@@ -1,3 +1,4 @@
|
|||||||
|
import base64
|
||||||
from django.db import models
|
from django.db import models
|
||||||
from logs.models import BaseAuditModel
|
from logs.models import BaseAuditModel
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
@@ -17,41 +18,27 @@ SCRIPT_TYPES = [
|
|||||||
class Script(BaseAuditModel):
|
class Script(BaseAuditModel):
|
||||||
name = models.CharField(max_length=255)
|
name = models.CharField(max_length=255)
|
||||||
description = models.TextField(null=True, blank=True)
|
description = models.TextField(null=True, blank=True)
|
||||||
filename = models.CharField(max_length=255)
|
filename = models.CharField(max_length=255) # deprecated
|
||||||
shell = models.CharField(
|
shell = models.CharField(
|
||||||
max_length=100, choices=SCRIPT_SHELLS, default="powershell"
|
max_length=100, choices=SCRIPT_SHELLS, default="powershell"
|
||||||
)
|
)
|
||||||
script_type = models.CharField(
|
script_type = models.CharField(
|
||||||
max_length=100, choices=SCRIPT_TYPES, default="userdefined"
|
max_length=100, choices=SCRIPT_TYPES, default="userdefined"
|
||||||
)
|
)
|
||||||
|
favorite = models.BooleanField(default=False)
|
||||||
|
category = models.CharField(max_length=100, null=True, blank=True)
|
||||||
|
code_base64 = models.TextField(null=True, blank=True)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.filename
|
return self.name
|
||||||
|
|
||||||
@property
|
|
||||||
def filepath(self):
|
|
||||||
# for the windows agent when using 'salt-call'
|
|
||||||
if self.script_type == "userdefined":
|
|
||||||
return f"salt://scripts//userdefined//{self.filename}"
|
|
||||||
else:
|
|
||||||
return f"salt://scripts//{self.filename}"
|
|
||||||
|
|
||||||
@property
|
|
||||||
def file(self):
|
|
||||||
if self.script_type == "userdefined":
|
|
||||||
return f"{settings.SCRIPTS_DIR}/userdefined/{self.filename}"
|
|
||||||
else:
|
|
||||||
return f"{settings.SCRIPTS_DIR}/{self.filename}"
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def code(self):
|
def code(self):
|
||||||
try:
|
if self.code_base64:
|
||||||
with open(self.file, "r") as f:
|
base64_bytes = self.code_base64.encode("ascii", "ignore")
|
||||||
text = f.read()
|
return base64.b64decode(base64_bytes).decode("ascii", "ignore")
|
||||||
except:
|
else:
|
||||||
text = "n/a"
|
return ""
|
||||||
|
|
||||||
return text
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def load_community_scripts(cls):
|
def load_community_scripts(cls):
|
||||||
@@ -79,21 +66,40 @@ class Script(BaseAuditModel):
|
|||||||
for script in info:
|
for script in info:
|
||||||
if os.path.exists(os.path.join(scripts_dir, script["filename"])):
|
if os.path.exists(os.path.join(scripts_dir, script["filename"])):
|
||||||
s = cls.objects.filter(script_type="builtin").filter(
|
s = cls.objects.filter(script_type="builtin").filter(
|
||||||
filename=script["filename"]
|
name=script["name"]
|
||||||
)
|
)
|
||||||
if s.exists():
|
if s.exists():
|
||||||
i = s.first()
|
i = s.first()
|
||||||
i.name = script["name"]
|
i.name = script["name"]
|
||||||
i.description = script["description"]
|
i.description = script["description"]
|
||||||
i.save(update_fields=["name", "description"])
|
i.category = "Community"
|
||||||
|
|
||||||
|
with open(os.path.join(scripts_dir, script["filename"]), "rb") as f:
|
||||||
|
script_bytes = (
|
||||||
|
f.read().decode("utf-8").encode("ascii", "ignore")
|
||||||
|
)
|
||||||
|
i.code_base64 = base64.b64encode(script_bytes).decode("ascii")
|
||||||
|
|
||||||
|
i.save(
|
||||||
|
update_fields=["name", "description", "category", "code_base64"]
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
print(f"Adding new community script: {script['name']}")
|
print(f"Adding new community script: {script['name']}")
|
||||||
|
|
||||||
|
with open(os.path.join(scripts_dir, script["filename"]), "rb") as f:
|
||||||
|
script_bytes = (
|
||||||
|
f.read().decode("utf-8").encode("ascii", "ignore")
|
||||||
|
)
|
||||||
|
code_base64 = base64.b64encode(script_bytes).decode("ascii")
|
||||||
|
|
||||||
cls(
|
cls(
|
||||||
|
code_base64=code_base64,
|
||||||
name=script["name"],
|
name=script["name"],
|
||||||
description=script["description"],
|
description=script["description"],
|
||||||
filename=script["filename"],
|
filename=script["filename"],
|
||||||
shell=script["shell"],
|
shell=script["shell"],
|
||||||
script_type="builtin",
|
script_type="builtin",
|
||||||
|
category="Community",
|
||||||
).save()
|
).save()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
|
|||||||
@@ -1,41 +1,33 @@
|
|||||||
import os
|
from rest_framework.serializers import ModelSerializer, ReadOnlyField
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
from rest_framework.serializers import ModelSerializer, ValidationError, ReadOnlyField
|
|
||||||
from .models import Script
|
from .models import Script
|
||||||
|
|
||||||
|
|
||||||
class ScriptSerializer(ModelSerializer):
|
class ScriptTableSerializer(ModelSerializer):
|
||||||
|
|
||||||
code = ReadOnlyField()
|
|
||||||
filepath = ReadOnlyField()
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Script
|
model = Script
|
||||||
fields = "__all__"
|
fields = [
|
||||||
|
"id",
|
||||||
|
"name",
|
||||||
|
"description",
|
||||||
|
"script_type",
|
||||||
|
"shell",
|
||||||
|
"category",
|
||||||
|
"favorite",
|
||||||
|
]
|
||||||
|
|
||||||
def validate(self, val):
|
|
||||||
if "filename" in val:
|
|
||||||
# validate the filename
|
|
||||||
if (
|
|
||||||
not val["filename"].endswith(".py")
|
|
||||||
and not val["filename"].endswith(".ps1")
|
|
||||||
and not val["filename"].endswith(".bat")
|
|
||||||
):
|
|
||||||
raise ValidationError("File types supported are .py, .ps1 and .bat")
|
|
||||||
|
|
||||||
# make sure file doesn't already exist on server
|
class ScriptSerializer(ModelSerializer):
|
||||||
# but only if adding, not if editing since will overwrite if edit
|
class Meta:
|
||||||
if not self.instance:
|
model = Script
|
||||||
script_path = os.path.join(
|
fields = [
|
||||||
f"{settings.SCRIPTS_DIR}/userdefined", val["filename"]
|
"id",
|
||||||
)
|
"name",
|
||||||
if os.path.exists(script_path):
|
"description",
|
||||||
raise ValidationError(
|
"shell",
|
||||||
f"{val['filename']} already exists. Delete or edit the existing script first."
|
"category",
|
||||||
)
|
"favorite",
|
||||||
|
"code_base64",
|
||||||
return val
|
]
|
||||||
|
|
||||||
|
|
||||||
class ScriptCheckSerializer(ModelSerializer):
|
class ScriptCheckSerializer(ModelSerializer):
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
from django.core.files.uploadedfile import SimpleUploadedFile
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from tacticalrmm.test import TacticalTestCase
|
from tacticalrmm.test import TacticalTestCase
|
||||||
from model_bakery import baker
|
from model_bakery import baker
|
||||||
from .serializers import ScriptSerializer
|
from .serializers import ScriptSerializer, ScriptTableSerializer
|
||||||
from .models import Script
|
from .models import Script
|
||||||
|
|
||||||
|
|
||||||
@@ -16,16 +17,50 @@ class TestScriptViews(TacticalTestCase):
|
|||||||
url = "/scripts/scripts/"
|
url = "/scripts/scripts/"
|
||||||
scripts = baker.make("scripts.Script", _quantity=3)
|
scripts = baker.make("scripts.Script", _quantity=3)
|
||||||
|
|
||||||
serializer = ScriptSerializer(scripts, many=True)
|
serializer = ScriptTableSerializer(scripts, many=True)
|
||||||
resp = self.client.get(url, format="json")
|
resp = self.client.get(url, format="json")
|
||||||
self.assertEqual(resp.status_code, 200)
|
self.assertEqual(resp.status_code, 200)
|
||||||
self.assertEqual(serializer.data, resp.data)
|
self.assertEqual(serializer.data, resp.data)
|
||||||
|
|
||||||
self.check_not_authenticated("get", url)
|
self.check_not_authenticated("get", url)
|
||||||
|
|
||||||
# TODO Need to test file uploads and saves
|
|
||||||
def test_add_script(self):
|
def test_add_script(self):
|
||||||
pass
|
url = f"/scripts/scripts/"
|
||||||
|
|
||||||
|
data = {
|
||||||
|
"name": "Name",
|
||||||
|
"description": "Description",
|
||||||
|
"shell": "powershell",
|
||||||
|
"category": "New",
|
||||||
|
"code": "Some Test Code\nnew Line",
|
||||||
|
}
|
||||||
|
|
||||||
|
# test without file upload
|
||||||
|
resp = self.client.post(url, data)
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
self.assertTrue(Script.objects.filter(name="Name").exists())
|
||||||
|
self.assertEqual(Script.objects.get(name="Name").code, data["code"])
|
||||||
|
|
||||||
|
# test with file upload
|
||||||
|
# file with 'Test' as content
|
||||||
|
file = SimpleUploadedFile(
|
||||||
|
"test_script.bat", b"\x54\x65\x73\x74", content_type="text/plain"
|
||||||
|
)
|
||||||
|
data = {
|
||||||
|
"name": "New Name",
|
||||||
|
"description": "Description",
|
||||||
|
"shell": "cmd",
|
||||||
|
"category": "New",
|
||||||
|
"filename": file,
|
||||||
|
}
|
||||||
|
|
||||||
|
# test with file upload
|
||||||
|
resp = self.client.post(url, data, format="multipart")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
script = Script.objects.filter(name="New Name").first()
|
||||||
|
self.assertEquals(script.code, "Test")
|
||||||
|
|
||||||
|
self.check_not_authenticated("post", url)
|
||||||
|
|
||||||
def test_modify_script(self):
|
def test_modify_script(self):
|
||||||
# test a call where script doesn't exist
|
# test a call where script doesn't exist
|
||||||
@@ -40,23 +75,39 @@ class TestScriptViews(TacticalTestCase):
|
|||||||
"name": script.name,
|
"name": script.name,
|
||||||
"description": "Description Change",
|
"description": "Description Change",
|
||||||
"shell": script.shell,
|
"shell": script.shell,
|
||||||
|
"code": "Test Code\nAnother Line",
|
||||||
}
|
}
|
||||||
|
|
||||||
# test edit a userdefined script
|
# test edit a userdefined script
|
||||||
resp = self.client.put(url, data, format="json")
|
resp = self.client.put(url, data, format="json")
|
||||||
self.assertEqual(resp.status_code, 200)
|
self.assertEqual(resp.status_code, 200)
|
||||||
self.assertEquals(
|
script = Script.objects.get(pk=script.pk)
|
||||||
Script.objects.get(pk=script.pk).description, "Description Change"
|
self.assertEquals(script.description, "Description Change")
|
||||||
)
|
self.assertEquals(script.code, "Test Code\nAnother Line")
|
||||||
|
|
||||||
# test edit a builtin script
|
# test edit a builtin script
|
||||||
builtin_script = baker.make_recipe("scripts.builtin_script")
|
|
||||||
|
data = {"name": "New Name", "description": "New Desc", "code": "Some New Code"}
|
||||||
|
builtin_script = baker.make_recipe("scripts.script", script_type="builtin")
|
||||||
|
|
||||||
resp = self.client.put(
|
resp = self.client.put(
|
||||||
f"/scripts/{builtin_script.pk}/script/", data, format="json"
|
f"/scripts/{builtin_script.pk}/script/", data, format="json"
|
||||||
)
|
)
|
||||||
self.assertEqual(resp.status_code, 400)
|
self.assertEqual(resp.status_code, 400)
|
||||||
|
|
||||||
# TODO Test changing script file
|
data = {
|
||||||
|
"name": script.name,
|
||||||
|
"description": "Description Change",
|
||||||
|
"shell": script.shell,
|
||||||
|
"favorite": True,
|
||||||
|
"code": "Test Code\nAnother Line",
|
||||||
|
}
|
||||||
|
# test marking a builtin script as favorite
|
||||||
|
resp = self.client.put(
|
||||||
|
f"/scripts/{builtin_script.pk}/script/", data, format="json"
|
||||||
|
)
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
self.assertTrue(Script.objects.get(pk=builtin_script.pk).favorite)
|
||||||
|
|
||||||
self.check_not_authenticated("put", url)
|
self.check_not_authenticated("put", url)
|
||||||
|
|
||||||
@@ -79,6 +130,7 @@ class TestScriptViews(TacticalTestCase):
|
|||||||
resp = self.client.delete("/scripts/500/script/", format="json")
|
resp = self.client.delete("/scripts/500/script/", format="json")
|
||||||
self.assertEqual(resp.status_code, 404)
|
self.assertEqual(resp.status_code, 404)
|
||||||
|
|
||||||
|
# test delete script
|
||||||
script = baker.make_recipe("scripts.script")
|
script = baker.make_recipe("scripts.script")
|
||||||
url = f"/scripts/{script.pk}/script/"
|
url = f"/scripts/{script.pk}/script/"
|
||||||
resp = self.client.delete(url, format="json")
|
resp = self.client.delete(url, format="json")
|
||||||
@@ -86,13 +138,50 @@ class TestScriptViews(TacticalTestCase):
|
|||||||
|
|
||||||
self.assertFalse(Script.objects.filter(pk=script.pk).exists())
|
self.assertFalse(Script.objects.filter(pk=script.pk).exists())
|
||||||
|
|
||||||
|
# test delete community script
|
||||||
|
script = baker.make_recipe("scripts.script", script_type="builtin")
|
||||||
|
url = f"/scripts/{script.pk}/script/"
|
||||||
|
resp = self.client.delete(url, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 400)
|
||||||
|
|
||||||
self.check_not_authenticated("delete", url)
|
self.check_not_authenticated("delete", url)
|
||||||
|
|
||||||
# TODO Need to mock file open
|
|
||||||
def test_download_script(self):
|
def test_download_script(self):
|
||||||
pass
|
# test a call where script doesn't exist
|
||||||
|
resp = self.client.get("/scripts/500/download/", format="json")
|
||||||
|
self.assertEqual(resp.status_code, 404)
|
||||||
|
|
||||||
def test_load_community_scripts(self):
|
# return script code property should be "Test"
|
||||||
|
|
||||||
|
# test powershell file
|
||||||
|
script = baker.make(
|
||||||
|
"scripts.Script", code_base64="VGVzdA==", shell="powershell"
|
||||||
|
)
|
||||||
|
url = f"/scripts/{script.pk}/download/"
|
||||||
|
|
||||||
|
resp = self.client.get(url, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
self.assertEqual(resp.data, {"filename": f"{script.name}.ps1", "code": "Test"})
|
||||||
|
|
||||||
|
# test batch file
|
||||||
|
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="cmd")
|
||||||
|
url = f"/scripts/{script.pk}/download/"
|
||||||
|
|
||||||
|
resp = self.client.get(url, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
self.assertEqual(resp.data, {"filename": f"{script.name}.bat", "code": "Test"})
|
||||||
|
|
||||||
|
# test python file
|
||||||
|
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="python")
|
||||||
|
url = f"/scripts/{script.pk}/download/"
|
||||||
|
|
||||||
|
resp = self.client.get(url, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
self.assertEqual(resp.data, {"filename": f"{script.name}.py", "code": "Test"})
|
||||||
|
|
||||||
|
self.check_not_authenticated("get", url)
|
||||||
|
|
||||||
|
def test_community_script_json_file(self):
|
||||||
valid_shells = ["powershell", "python", "cmd"]
|
valid_shells = ["powershell", "python", "cmd"]
|
||||||
|
|
||||||
if not settings.DOCKER_BUILD:
|
if not settings.DOCKER_BUILD:
|
||||||
@@ -113,5 +202,19 @@ class TestScriptViews(TacticalTestCase):
|
|||||||
self.assertTrue(script["name"])
|
self.assertTrue(script["name"])
|
||||||
self.assertTrue(script["description"])
|
self.assertTrue(script["description"])
|
||||||
self.assertTrue(script["shell"])
|
self.assertTrue(script["shell"])
|
||||||
self.assertTrue(script["description"])
|
|
||||||
self.assertIn(script["shell"], valid_shells)
|
self.assertIn(script["shell"], valid_shells)
|
||||||
|
|
||||||
|
def test_load_community_scripts(self):
|
||||||
|
with open(
|
||||||
|
os.path.join(settings.BASE_DIR, "scripts/community_scripts.json")
|
||||||
|
) as f:
|
||||||
|
info = json.load(f)
|
||||||
|
|
||||||
|
Script.load_community_scripts()
|
||||||
|
|
||||||
|
community_scripts = Script.objects.filter(script_type="builtin").count()
|
||||||
|
self.assertEqual(len(info), community_scripts)
|
||||||
|
|
||||||
|
# test updating already added community scripts
|
||||||
|
Script.load_community_scripts()
|
||||||
|
self.assertEqual(len(info), community_scripts)
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import os
|
import base64
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
|
|
||||||
from django.shortcuts import get_object_or_404
|
from django.shortcuts import get_object_or_404
|
||||||
@@ -11,9 +11,10 @@ from rest_framework.response import Response
|
|||||||
from rest_framework.parsers import FileUploadParser
|
from rest_framework.parsers import FileUploadParser
|
||||||
|
|
||||||
from .models import Script
|
from .models import Script
|
||||||
from .serializers import ScriptSerializer
|
from .serializers import ScriptSerializer, ScriptTableSerializer
|
||||||
from tacticalrmm.utils import notify_error
|
from tacticalrmm.utils import notify_error
|
||||||
|
|
||||||
|
|
||||||
logger.configure(**settings.LOG_CONFIG)
|
logger.configure(**settings.LOG_CONFIG)
|
||||||
|
|
||||||
|
|
||||||
@@ -22,74 +23,65 @@ class GetAddScripts(APIView):
|
|||||||
|
|
||||||
def get(self, request):
|
def get(self, request):
|
||||||
scripts = Script.objects.all()
|
scripts = Script.objects.all()
|
||||||
return Response(ScriptSerializer(scripts, many=True).data)
|
return Response(ScriptTableSerializer(scripts, many=True).data)
|
||||||
|
|
||||||
def put(self, request, format=None):
|
def post(self, request, format=None):
|
||||||
|
|
||||||
file_obj = request.data["filename"] # the actual file in_memory object
|
|
||||||
|
|
||||||
# need to manually create the serialized data
|
|
||||||
# since javascript formData doesn't support JSON
|
|
||||||
filename = str(file_obj)
|
|
||||||
data = {
|
data = {
|
||||||
"name": request.data["name"],
|
"name": request.data["name"],
|
||||||
"filename": filename,
|
"category": request.data["category"],
|
||||||
"description": request.data["description"],
|
"description": request.data["description"],
|
||||||
"shell": request.data["shell"],
|
"shell": request.data["shell"],
|
||||||
"script_type": "userdefined", # force all uploads to be userdefined. built in scripts cannot be edited by user
|
"script_type": "userdefined", # force all uploads to be userdefined. built in scripts cannot be edited by user
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if "favorite" in request.data:
|
||||||
|
data["favorite"] = request.data["favorite"]
|
||||||
|
|
||||||
|
if "filename" in request.data:
|
||||||
|
message_bytes = request.data["filename"].read()
|
||||||
|
data["code_base64"] = base64.b64encode(message_bytes).decode(
|
||||||
|
"ascii", "ignore"
|
||||||
|
)
|
||||||
|
|
||||||
|
elif "code" in request.data:
|
||||||
|
message_bytes = request.data["code"].encode("ascii", "ignore")
|
||||||
|
data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
|
||||||
|
|
||||||
serializer = ScriptSerializer(data=data, partial=True)
|
serializer = ScriptSerializer(data=data, partial=True)
|
||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
obj = serializer.save()
|
obj = serializer.save()
|
||||||
|
|
||||||
with open(obj.file, "wb+") as f:
|
|
||||||
for chunk in file_obj.chunks():
|
|
||||||
f.write(chunk)
|
|
||||||
|
|
||||||
return Response(f"{obj.name} was added!")
|
return Response(f"{obj.name} was added!")
|
||||||
|
|
||||||
|
|
||||||
class GetUpdateDeleteScript(APIView):
|
class GetUpdateDeleteScript(APIView):
|
||||||
parser_class = (FileUploadParser,)
|
|
||||||
|
|
||||||
def get(self, request, pk):
|
def get(self, request, pk):
|
||||||
script = get_object_or_404(Script, pk=pk)
|
script = get_object_or_404(Script, pk=pk)
|
||||||
return Response(ScriptSerializer(script).data)
|
return Response(ScriptSerializer(script).data)
|
||||||
|
|
||||||
def put(self, request, pk, format=None):
|
def put(self, request, pk):
|
||||||
script = get_object_or_404(Script, pk=pk)
|
script = get_object_or_404(Script, pk=pk)
|
||||||
|
|
||||||
# this will never trigger but check anyway
|
data = request.data
|
||||||
|
|
||||||
if script.script_type == "builtin":
|
if script.script_type == "builtin":
|
||||||
return notify_error("Built in scripts cannot be edited")
|
# allow only favoriting builtin scripts
|
||||||
|
if "favorite" in data:
|
||||||
|
# overwrite request data
|
||||||
|
data = {"favorite": data["favorite"]}
|
||||||
|
else:
|
||||||
|
return notify_error("Community scripts cannot be edited.")
|
||||||
|
|
||||||
data = {
|
elif "code" in data:
|
||||||
"name": request.data["name"],
|
message_bytes = data["code"].encode("ascii")
|
||||||
"description": request.data["description"],
|
data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
|
||||||
"shell": request.data["shell"],
|
data.pop("code")
|
||||||
}
|
|
||||||
|
|
||||||
# if uploading a new version of the script
|
|
||||||
if "filename" in request.data:
|
|
||||||
file_obj = request.data["filename"]
|
|
||||||
data["filename"] = str(file_obj)
|
|
||||||
|
|
||||||
serializer = ScriptSerializer(data=data, instance=script, partial=True)
|
serializer = ScriptSerializer(data=data, instance=script, partial=True)
|
||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
obj = serializer.save()
|
obj = serializer.save()
|
||||||
|
|
||||||
if "filename" in request.data:
|
|
||||||
|
|
||||||
try:
|
|
||||||
os.remove(obj.file)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
with open(obj.file, "wb+") as f:
|
|
||||||
for chunk in file_obj.chunks():
|
|
||||||
f.write(chunk)
|
|
||||||
|
|
||||||
return Response(f"{obj.name} was edited!")
|
return Response(f"{obj.name} was edited!")
|
||||||
|
|
||||||
def delete(self, request, pk):
|
def delete(self, request, pk):
|
||||||
@@ -97,12 +89,7 @@ class GetUpdateDeleteScript(APIView):
|
|||||||
|
|
||||||
# this will never trigger but check anyway
|
# this will never trigger but check anyway
|
||||||
if script.script_type == "builtin":
|
if script.script_type == "builtin":
|
||||||
return notify_error("Built in scripts cannot be deleted")
|
return notify_error("Community scripts cannot be deleted")
|
||||||
|
|
||||||
try:
|
|
||||||
os.remove(script.file)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
script.delete()
|
script.delete()
|
||||||
return Response(f"{script.name} was deleted!")
|
return Response(f"{script.name} was deleted!")
|
||||||
@@ -111,33 +98,12 @@ class GetUpdateDeleteScript(APIView):
|
|||||||
@api_view()
|
@api_view()
|
||||||
def download(request, pk):
|
def download(request, pk):
|
||||||
script = get_object_or_404(Script, pk=pk)
|
script = get_object_or_404(Script, pk=pk)
|
||||||
use_nginx = False
|
|
||||||
conf = "/etc/nginx/sites-available/rmm.conf"
|
|
||||||
|
|
||||||
if os.path.exists(conf):
|
if script.shell == "powershell":
|
||||||
try:
|
filename = f"{script.name}.ps1"
|
||||||
with open(conf) as f:
|
elif script.shell == "cmd":
|
||||||
for line in f.readlines():
|
filename = f"{script.name}.bat"
|
||||||
if "location" and "builtin" in line:
|
|
||||||
use_nginx = True
|
|
||||||
break
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(e)
|
|
||||||
else:
|
else:
|
||||||
use_nginx = True
|
filename = f"{script.name}.py"
|
||||||
|
|
||||||
if settings.DEBUG or not use_nginx:
|
return Response({"filename": filename, "code": script.code})
|
||||||
with open(script.file, "rb") as f:
|
|
||||||
response = HttpResponse(f.read(), content_type="text/plain")
|
|
||||||
response["Content-Disposition"] = f"attachment; filename={script.filename}"
|
|
||||||
return response
|
|
||||||
else:
|
|
||||||
response = HttpResponse()
|
|
||||||
response["Content-Disposition"] = f"attachment; filename={script.filename}"
|
|
||||||
|
|
||||||
response["X-Accel-Redirect"] = (
|
|
||||||
f"/saltscripts/{script.filename}"
|
|
||||||
if script.script_type == "userdefined"
|
|
||||||
else f"/builtin/{script.filename}"
|
|
||||||
)
|
|
||||||
return response
|
|
||||||
|
|||||||
@@ -33,9 +33,9 @@ app.conf.beat_schedule = {
|
|||||||
"task": "winupdate.tasks.check_agent_update_schedule_task",
|
"task": "winupdate.tasks.check_agent_update_schedule_task",
|
||||||
"schedule": crontab(minute=5, hour="*"),
|
"schedule": crontab(minute=5, hour="*"),
|
||||||
},
|
},
|
||||||
"sync-modules": {
|
"agents-checkinfull": {
|
||||||
"task": "agents.tasks.batch_sync_modules_task",
|
"task": "agents.tasks.check_in_task",
|
||||||
"schedule": crontab(minute=25, hour="*/4"),
|
"schedule": crontab(minute="*/24"),
|
||||||
},
|
},
|
||||||
"agent-auto-update": {
|
"agent-auto-update": {
|
||||||
"task": "agents.tasks.auto_self_agent_update_task",
|
"task": "agents.tasks.auto_self_agent_update_task",
|
||||||
@@ -45,6 +45,10 @@ app.conf.beat_schedule = {
|
|||||||
"task": "agents.tasks.sync_sysinfo_task",
|
"task": "agents.tasks.sync_sysinfo_task",
|
||||||
"schedule": crontab(minute=55, hour="*"),
|
"schedule": crontab(minute=55, hour="*"),
|
||||||
},
|
},
|
||||||
|
"check-agentservice": {
|
||||||
|
"task": "agents.tasks.monitor_agents_task",
|
||||||
|
"schedule": crontab(minute="*/15"),
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ def get_debug_info():
|
|||||||
|
|
||||||
|
|
||||||
EXCLUDE_PATHS = (
|
EXCLUDE_PATHS = (
|
||||||
|
"/natsapi",
|
||||||
"/api/v3",
|
"/api/v3",
|
||||||
"/api/v2",
|
"/api/v2",
|
||||||
"/logs/auditlogs",
|
"/logs/auditlogs",
|
||||||
|
|||||||
@@ -15,25 +15,25 @@ EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
|
|||||||
AUTH_USER_MODEL = "accounts.User"
|
AUTH_USER_MODEL = "accounts.User"
|
||||||
|
|
||||||
# latest release
|
# latest release
|
||||||
TRMM_VERSION = "0.2.9"
|
TRMM_VERSION = "0.2.23"
|
||||||
|
|
||||||
# bump this version everytime vue code is changed
|
# bump this version everytime vue code is changed
|
||||||
# to alert user they need to manually refresh their browser
|
# to alert user they need to manually refresh their browser
|
||||||
APP_VER = "0.0.98"
|
APP_VER = "0.0.103"
|
||||||
|
|
||||||
# https://github.com/wh1te909/salt
|
# https://github.com/wh1te909/salt
|
||||||
LATEST_SALT_VER = "1.1.0"
|
LATEST_SALT_VER = "1.1.0"
|
||||||
|
|
||||||
# https://github.com/wh1te909/rmmagent
|
# https://github.com/wh1te909/rmmagent
|
||||||
LATEST_AGENT_VER = "1.1.4"
|
LATEST_AGENT_VER = "1.1.12"
|
||||||
|
|
||||||
MESH_VER = "0.7.14"
|
MESH_VER = "0.7.45"
|
||||||
|
|
||||||
SALT_MASTER_VER = "3002.2"
|
SALT_MASTER_VER = "3002.2"
|
||||||
|
|
||||||
# for the update script, bump when need to recreate venv or npm install
|
# for the update script, bump when need to recreate venv or npm install
|
||||||
PIP_VER = "4"
|
PIP_VER = "6"
|
||||||
NPM_VER = "3"
|
NPM_VER = "6"
|
||||||
|
|
||||||
DL_64 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}.exe"
|
DL_64 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}.exe"
|
||||||
DL_32 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}-x86.exe"
|
DL_32 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}-x86.exe"
|
||||||
@@ -72,6 +72,7 @@ INSTALLED_APPS = [
|
|||||||
"logs",
|
"logs",
|
||||||
"scripts",
|
"scripts",
|
||||||
"alerts",
|
"alerts",
|
||||||
|
"natsapi",
|
||||||
]
|
]
|
||||||
|
|
||||||
if not "TRAVIS" in os.environ and not "AZPIPELINE" in os.environ:
|
if not "TRAVIS" in os.environ and not "AZPIPELINE" in os.environ:
|
||||||
|
|||||||
@@ -13,6 +13,9 @@ class TacticalTestCase(TestCase):
|
|||||||
self.john = User(username="john")
|
self.john = User(username="john")
|
||||||
self.john.set_password("hunter2")
|
self.john.set_password("hunter2")
|
||||||
self.john.save()
|
self.john.save()
|
||||||
|
self.alice = User(username="alice")
|
||||||
|
self.alice.set_password("hunter2")
|
||||||
|
self.alice.save()
|
||||||
self.client_setup()
|
self.client_setup()
|
||||||
self.client.force_authenticate(user=self.john)
|
self.client.force_authenticate(user=self.john)
|
||||||
|
|
||||||
|
|||||||
@@ -25,4 +25,5 @@ urlpatterns = [
|
|||||||
path("scripts/", include("scripts.urls")),
|
path("scripts/", include("scripts.urls")),
|
||||||
path("alerts/", include("alerts.urls")),
|
path("alerts/", include("alerts.urls")),
|
||||||
path("accounts/", include("accounts.urls")),
|
path("accounts/", include("accounts.urls")),
|
||||||
|
path("natsapi/", include("natsapi.urls")),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -28,29 +28,35 @@ jobs:
|
|||||||
cd /myagent/_work/1/s/api/tacticalrmm
|
cd /myagent/_work/1/s/api/tacticalrmm
|
||||||
pip install --no-cache-dir --upgrade pip
|
pip install --no-cache-dir --upgrade pip
|
||||||
pip install --no-cache-dir setuptools==50.3.2 wheel==0.36.1
|
pip install --no-cache-dir setuptools==50.3.2 wheel==0.36.1
|
||||||
pip install --no-cache-dir -r requirements.txt -r requirements-test.txt
|
pip install --no-cache-dir -r requirements.txt -r requirements-test.txt -r requirements-dev.txt
|
||||||
displayName: "Install Python Dependencies"
|
displayName: "Install Python Dependencies"
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
cd /myagent/_work/1/s/api
|
cd /myagent/_work/1/s/api
|
||||||
git config user.email "admin@example.com"
|
|
||||||
git config user.name "Bob"
|
|
||||||
git fetch
|
|
||||||
git checkout develop
|
|
||||||
git pull
|
|
||||||
source env/bin/activate
|
source env/bin/activate
|
||||||
cd /myagent/_work/1/s/api/tacticalrmm
|
cd /myagent/_work/1/s/api/tacticalrmm
|
||||||
coverage run manage.py test -v 2
|
coverage run manage.py test -v 2
|
||||||
coveralls
|
if [ $? -ne 0 ]; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
displayName: "Run django tests"
|
displayName: "Run django tests"
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
rm -rf /myagent/_work/1/s/web/node_modules
|
cd /myagent/_work/1/s/api
|
||||||
cd /myagent/_work/1/s/web
|
source env/bin/activate
|
||||||
npm install
|
black --check tacticalrmm
|
||||||
displayName: "Install Frontend"
|
if [ $? -ne 0 ]; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
displayName: "Codestyle black"
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
cd /myagent/_work/1/s/web
|
cd /myagent/_work/1/s/api
|
||||||
npm run test:unit
|
source env/bin/activate
|
||||||
displayName: "Run Vue Tests"
|
cd /myagent/_work/1/s/api/tacticalrmm
|
||||||
|
export CIRCLE_BRANCH=$BUILD_SOURCEBRANCH
|
||||||
|
coveralls
|
||||||
|
displayName: "coveralls"
|
||||||
|
env:
|
||||||
|
CIRCLECI: 1
|
||||||
|
CIRCLE_BUILD_NUM: $(Build.BuildNumber)
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
SCRIPT_VERSION="4"
|
SCRIPT_VERSION="5"
|
||||||
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh'
|
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh'
|
||||||
|
|
||||||
GREEN='\033[0;32m'
|
GREEN='\033[0;32m'
|
||||||
@@ -50,6 +50,11 @@ if [ -d /meshcentral/meshcentral-coredumps ]; then
|
|||||||
rm -f /meshcentral/meshcentral-coredumps/*
|
rm -f /meshcentral/meshcentral-coredumps/*
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
printf >&2 "${GREEN}Running postgres vacuum${NC}\n"
|
||||||
|
sudo -u postgres psql -d tacticalrmm -c "vacuum full logs_auditlog"
|
||||||
|
sudo -u postgres psql -d tacticalrmm -c "vacuum full logs_pendingaction"
|
||||||
|
sudo -u postgres psql -d tacticalrmm -c "vacuum full agents_agentoutage"
|
||||||
|
|
||||||
dt_now=$(date '+%Y_%m_%d__%H_%M_%S')
|
dt_now=$(date '+%Y_%m_%d__%H_%M_%S')
|
||||||
tmp_dir=$(mktemp -d -t tacticalrmm-XXXXXXXXXXXXXXXXXXXXX)
|
tmp_dir=$(mktemp -d -t tacticalrmm-XXXXXXXXXXXXXXXXXXXXX)
|
||||||
sysd="/etc/systemd/system"
|
sysd="/etc/systemd/system"
|
||||||
|
|||||||
@@ -41,12 +41,7 @@ mesh_config="$(cat << EOF
|
|||||||
"NewAccounts": false,
|
"NewAccounts": false,
|
||||||
"mstsc": true,
|
"mstsc": true,
|
||||||
"GeoLocation": true,
|
"GeoLocation": true,
|
||||||
"CertUrl": "https://${NGINX_HOST_IP}:443",
|
"CertUrl": "https://${NGINX_HOST_IP}:443"
|
||||||
"httpheaders": {
|
|
||||||
"Strict-Transport-Security": "max-age=360000",
|
|
||||||
"_x-frame-options": "sameorigin",
|
|
||||||
"Content-Security-Policy": "default-src 'none'; script-src 'self' 'unsafe-inline'; connect-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-src 'self'; media-src 'self'"
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,6 +2,9 @@
|
|||||||
|
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
|
: "${APP_PORT:=80}"
|
||||||
|
: "${API_PORT:=80}"
|
||||||
|
|
||||||
CERT_PRIV_PATH=${TACTICAL_DIR}/certs/privkey.pem
|
CERT_PRIV_PATH=${TACTICAL_DIR}/certs/privkey.pem
|
||||||
CERT_PUB_PATH=${TACTICAL_DIR}/certs/fullchain.pem
|
CERT_PUB_PATH=${TACTICAL_DIR}/certs/fullchain.pem
|
||||||
|
|
||||||
@@ -31,7 +34,7 @@ server {
|
|||||||
|
|
||||||
location / {
|
location / {
|
||||||
#Using variable to disable start checks
|
#Using variable to disable start checks
|
||||||
set \$api http://tactical-backend;
|
set \$api http://tactical-backend:${API_PORT};
|
||||||
|
|
||||||
proxy_pass \$api;
|
proxy_pass \$api;
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
@@ -95,7 +98,7 @@ server {
|
|||||||
|
|
||||||
location / {
|
location / {
|
||||||
#Using variable to disable start checks
|
#Using variable to disable start checks
|
||||||
set \$app http://tactical-frontend;
|
set \$app http://tactical-frontend:${APP_PORT};
|
||||||
|
|
||||||
proxy_pass \$app;
|
proxy_pass \$app;
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
|
|||||||
44
docker/install.sh
Executable file
44
docker/install.sh
Executable file
@@ -0,0 +1,44 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -o nounset
|
||||||
|
set -o errexit
|
||||||
|
set -o pipefail
|
||||||
|
|
||||||
|
temp="/tmp/tactical"
|
||||||
|
|
||||||
|
args="$*"
|
||||||
|
version="latest"
|
||||||
|
branch="master"
|
||||||
|
|
||||||
|
branchRegex=" --branch ([^ ]+)"
|
||||||
|
if [[ " ${args}" =~ ${branchRegex} ]]; then
|
||||||
|
branch="${BASH_REMATCH[1]}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "branch=${branch}"
|
||||||
|
tactical_cli="https://raw.githubusercontent.com/wh1te909/tacticalrmm/${branch}/docker/tactical-cli"
|
||||||
|
|
||||||
|
versionRegex=" --version ([^ ]+)"
|
||||||
|
if [[ " ${args}" =~ ${versionRegex} ]]; then
|
||||||
|
version="${BASH_REMATCH[1]}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
rm -rf "${temp}"
|
||||||
|
if ! mkdir "${temp}"; then
|
||||||
|
echo >&2 "Failed to create temporary directory"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
cd "${temp}"
|
||||||
|
echo "Downloading tactical-cli from branch ${branch}"
|
||||||
|
if ! curl -sS "${tactical_cli}"; then
|
||||||
|
echo >&2 "Failed to download installation package ${tactical_cli}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
chmod +x tactical-cli
|
||||||
|
./tactical-cli ${args} --version "${version}" 2>&1 | tee -a ~/install.log
|
||||||
|
|
||||||
|
cd ~
|
||||||
|
if ! rm -rf "${temp}"; then
|
||||||
|
echo >&2 "Warning: Failed to remove temporary directory ${temp}"
|
||||||
|
fi
|
||||||
439
docker/tactical-cli
Normal file
439
docker/tactical-cli
Normal file
@@ -0,0 +1,439 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
set -o nounset
|
||||||
|
set -o errexit
|
||||||
|
set -o pipefail
|
||||||
|
|
||||||
|
# FUNCTIONS
|
||||||
|
function ask_questions {
|
||||||
|
|
||||||
|
while [[ -z "$API_HOST" ]] && [[ "$API_HOST" != *[.]*[.]* ]]
|
||||||
|
do
|
||||||
|
echo -ne "Enter the subdomain for the backend (e.g. api.example.com): "
|
||||||
|
read API_HOST
|
||||||
|
done
|
||||||
|
echo "API_HOST is set to ${API_HOST}"
|
||||||
|
|
||||||
|
while [[ -z "$APP_HOST" ]] && [[ "$APP_HOST" != *[.]*[.]* ]]
|
||||||
|
do
|
||||||
|
echo -ne "Enter the subdomain for the frontend (e.g. rmm.example.com): "
|
||||||
|
read APP_HOST
|
||||||
|
done
|
||||||
|
echo "APP_HOST is set to ${APP_HOST}"
|
||||||
|
|
||||||
|
while [[ -z "$MESH_HOST" ]] && [[ "$MESH_HOST" != *[.]*[.]* ]]
|
||||||
|
do
|
||||||
|
echo -ne "Enter the subdomain for meshcentral (e.g. mesh.example.com): "
|
||||||
|
read MESH_HOST
|
||||||
|
done
|
||||||
|
echo "MESH_HOST is set to ${MESH_HOST}"
|
||||||
|
|
||||||
|
while [[ -z "$EMAIL" ]] && [[ "$EMAIL" != *[@]*[.]* ]]
|
||||||
|
do
|
||||||
|
echo -ne "Enter a valid email address for django and meshcentral: "
|
||||||
|
read EMAIL
|
||||||
|
done
|
||||||
|
echo "EMAIL is set to ${EMAIL}"
|
||||||
|
|
||||||
|
while [[ -z "$USERNAME" ]]
|
||||||
|
do
|
||||||
|
echo -ne "Set username for mesh and tactical login: "
|
||||||
|
read USERNAME
|
||||||
|
done
|
||||||
|
echo "USERNAME is set to ${USERNAME}"
|
||||||
|
|
||||||
|
while [[ -z "$PASSWORD" ]]
|
||||||
|
do
|
||||||
|
echo -ne "Set password for mesh and tactical password: "
|
||||||
|
read PASSWORD
|
||||||
|
done
|
||||||
|
echo "PASSWORD is set"
|
||||||
|
|
||||||
|
# check if let's encrypt or cert-keys options were set
|
||||||
|
if [[ -z "$LETS_ENCRYPT" ]] && [[ -z "$CERT_PRIV_FILE" ]] || [[ -z "$CERT_PUB_FILE" ]]; then
|
||||||
|
echo -ne "Create a let's encrypt certificate?[Y,n]: "
|
||||||
|
read USE_LETS_ENCRYPT
|
||||||
|
|
||||||
|
[[ "$USE_LETS_ENCRYPT" == "" ]] || [[ "$USE_LETS_ENCRYPT" ~= [Yy] ]] && LETS_ENCRYPT=1
|
||||||
|
|
||||||
|
if [[ -z "$LET_ENCRYPT" ]]; then
|
||||||
|
echo "Let's Encrypt will not be used"
|
||||||
|
|
||||||
|
echo -ne "Do you want to specify paths to a certificate public key and private key?[Y,n]: "
|
||||||
|
read PRIVATE_CERTS
|
||||||
|
|
||||||
|
if [[ "$PRIVATE_CERTS" == "" ]] || [[ "$PRIVATE_CERTS" ~= [yY] ]]; then
|
||||||
|
|
||||||
|
# check for valid public certificate file
|
||||||
|
while [[ ! -f $CERT_PUB_FILE ]]
|
||||||
|
do
|
||||||
|
echo -ne "Enter a valid full path to public key file: "
|
||||||
|
read CERT_PUB_FILE
|
||||||
|
done
|
||||||
|
|
||||||
|
# check for valid private key file
|
||||||
|
while [[ ! -f $CERT_PRIV_FILE ]]
|
||||||
|
do
|
||||||
|
echo -ne "Enter a valid full path to private key file: "
|
||||||
|
read CERT_PRIV_FILE
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
function encode_certificates {
|
||||||
|
echo "Base64 encoding certificates"
|
||||||
|
CERT_PUB_BASE64="$(sudo base64 -w 0 ${CERT_PUB_FILE})"
|
||||||
|
CERT_PRIV_BASE64="$(sudo base64 -w 0 ${CERT_PRIV_FILE})"
|
||||||
|
}
|
||||||
|
|
||||||
|
function generate_env {
|
||||||
|
[[ -f "$ENV_FILE" ]] && echo "Env file already exists"; return 0;
|
||||||
|
|
||||||
|
local mongodb_user=$(cat /dev/urandom | tr -dc 'a-z' | fold -w 8 | head -n 1)
|
||||||
|
local mongodb_pass=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 20 | head -n 1)
|
||||||
|
local postgres_user=$(cat /dev/urandom | tr -dc 'a-z' | fold -w 8 | head -n 1)
|
||||||
|
local postgres_pass=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 20 | head -n 1)
|
||||||
|
|
||||||
|
echo "Generating env file in ${INSTALL_DIR}"
|
||||||
|
local config_file="$(cat << EOF
|
||||||
|
IMAGE_REPO=${REPO}
|
||||||
|
VERSION=${VERSION}
|
||||||
|
TRMM_USER=${USERNAME}
|
||||||
|
TRMM_PASS=${PASSWORD}
|
||||||
|
APP_HOST=${APP_HOST}
|
||||||
|
API_HOST=${API_HOST}
|
||||||
|
MESH_HOST=${MESH_HOST}
|
||||||
|
MESH_USER=${USERNAME}
|
||||||
|
MESH_PASS=${PASSWORD}
|
||||||
|
MONGODB_USER=${mongogb_user}
|
||||||
|
MONGODB_PASSWORD=${mongodb_pass}
|
||||||
|
POSTGRES_USER=${postgres_user}
|
||||||
|
POSTGRES_PASS=${postgres_pass}
|
||||||
|
EOF
|
||||||
|
)"
|
||||||
|
echo "${env_file}" > "$ENV_FILE"
|
||||||
|
}
|
||||||
|
|
||||||
|
function update_env_field {
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
function get_env_field {
|
||||||
|
local search_field="$1"
|
||||||
|
awk -F "=" '{if ($1==$search_field) { print $2" } }' $ENV_FILE
|
||||||
|
}
|
||||||
|
|
||||||
|
function initiate_letsencrypt {
  # Request a wildcard certificate for the root domain of API_HOST via
  # certbot's manual DNS challenge, retrying until it succeeds, then
  # point CERT_PRIV_FILE / CERT_PUB_FILE at the issued files.
  echo "Starting Let's Encrypt"

  # strip the left-most label: api.example.com -> example.com
  ROOT_DOMAIN=$(echo "${API_HOST}" | cut -d "." -f2-)

  # was ${ROOTDOMAIN}: an undefined variable, so the message printed blank
  echo "Root domain is ${ROOT_DOMAIN}"

  # retry until certbot succeeds (the manual DNS challenge commonly fails
  # on a mistyped TXT record); quote the wildcard so the shell cannot
  # glob-expand '*.domain' against files in the current directory
  until sudo certbot certonly --manual -d "*.${ROOT_DOMAIN}" --agree-tos --no-bootstrap --manual-public-ip-logging-ok --preferred-challenges dns -m "${EMAIL}" --no-eff-email
  do
    echo "Let's Encrypt certificate request failed. Retrying..."
  done

  CERT_PRIV_FILE=/etc/letsencrypt/live/${ROOT_DOMAIN}/privkey.pem
  CERT_PUB_FILE=/etc/letsencrypt/live/${ROOT_DOMAIN}/fullchain.pem
}
|
||||||
|
|
||||||
|
|
||||||
|
# setup defaults

# keep track of first arg so option handlers can verify that the mode
# (install/update/backup/restore/update-cert) was given first
FIRST_ARG="$1"

# defaults
REPO="tacticalrmm/"
BRANCH="master"
VERSION="latest"

# file locations
INSTALL_DIR=/opt/tactical
ENV_FILE=/opt/tactical/.env

# check prerequisites
command -v docker >/dev/null 2>&1 || { echo >&2 "Docker must be installed. Exiting..."; exit 1; }
command -v docker-compose >/dev/null 2>&1 || { echo >&2 "Docker Compose must be installed. Exiting..."; exit 1; }
command -v curl >/dev/null 2>&1 || { echo >&2 "Curl must be installed. Exiting..."; exit 1; }
command -v bash >/dev/null 2>&1 || { echo >&2 "Bash must be installed. Exiting..."; exit 1; }

# check for arguments
# braces are required: without them 'exit 1' ran unconditionally and the
# script could never get past this line
[ -z "$1" ] && { echo >&2 "No arguments supplied. Exiting..."; exit 1; }
|
||||||
|
|
||||||
|
# parse arguments
|
||||||
|
# parse arguments
#
# Fixes applied throughout this loop:
#  - every 'test && echo ...; exit 1' lacked { } grouping, so the script
#    exited unconditionally on the first option it parsed
#  - '[[ $MODE != a ]] || [[ $MODE != b ]]' is always true; the two
#    inequalities must be joined with &&
#  - value-taking options assigned "$key" (still the option NAME) instead
#    of "$1" after the shift, so e.g. BRANCH became "--branch"
#  - 'echo ">&2 ..."' put the redirection inside the string
#  - '${$1}' is invalid bash substitution
while [[ $# -gt 0 ]]
do
  key="$1"

  case $key in
    # install arg
    -i|install)
      [[ "$key" != "$FIRST_ARG" ]] && { echo >&2 "install must be the first argument. Exiting.."; exit 1; }
      MODE="install"
      shift # past argument
      ;;

    # update arg
    -u|update)
      [[ "$key" != "$FIRST_ARG" ]] && { echo >&2 "update must be the first argument. Exiting..."; exit 1; }
      MODE="update"
      shift # past argument
      ;;

    # backup arg
    -b|backup)
      [[ "$key" != "$FIRST_ARG" ]] && { echo >&2 "backup must be the first argument. Exiting..."; exit 1; }
      MODE="backup"
      shift # past argument
      ;;

    # restore arg
    -r|restore)
      [[ "$key" != "$FIRST_ARG" ]] && { echo >&2 "restore must be the first argument. Exiting..."; exit 1; }
      MODE="restore"
      shift # past argument
      ;;

    # update-cert arg
    -c|update-cert)
      [[ "$key" != "$FIRST_ARG" ]] && { echo >&2 "update-cert must be the first argument. Exiting..."; exit 1; }
      MODE="update-cert"
      shift # past argument
      ;;

    # use-lets-encrypt arg
    --use-lets-encrypt)
      [[ -z "$MODE" ]] && { echo >&2 "Missing install or update-cert as first argument. Exiting..."; exit 1; }
      [[ "$MODE" != "install" && "$MODE" != "update-cert" ]] && \
        { echo >&2 "--use-lets-encrypt option only valid for install and update-cert. Exiting..."; exit 1; }
      LETS_ENCRYPT=1
      shift # past argument
      ;;

    # cert-priv-file arg
    --cert-priv-file)
      [[ -z "$MODE" ]] && { echo >&2 "Missing install or update-cert first argument. Exiting..."; exit 1; }
      [[ "$MODE" != "install" && "$MODE" != "update-cert" ]] && \
        { echo >&2 "--cert-priv-file option only valid for install and update-cert. Exiting..."; exit 1; }
      shift # past argument
      [ ! -f "$1" ] && { echo >&2 "Certificate private key file $1 does not exist. Use absolute paths. Exiting..."; exit 1; }
      CERT_PRIV_FILE="$1"
      shift # past value
      ;;

    # cert-pub-file arg
    --cert-pub-file)
      [[ -z "$MODE" ]] && { echo >&2 "Missing install or update-cert first argument. Exiting..."; exit 1; }
      [[ "$MODE" != "install" && "$MODE" != "update-cert" ]] && \
        { echo >&2 "--cert-pub-file option only valid for install and update-cert. Exiting..."; exit 1; }
      shift # past argument
      [ ! -f "$1" ] && { echo >&2 "Certificate public Key file $1 does not exist. Use absolute paths. Exiting..."; exit 1; }
      CERT_PUB_FILE="$1"
      shift # past value
      ;;

    # local arg
    --local)
      [[ -z "$MODE" ]] && { echo >&2 "Missing install or update first argument. Exiting..."; exit 1; }
      [[ "$MODE" != "install" && "$MODE" != "update" ]] && \
        { echo >&2 "--local option only valid for install and update. Exiting..."; exit 1; }
      REPO=""
      shift # past argument
      ;;

    # branch arg
    --branch)
      [[ -z "$MODE" ]] && { echo >&2 "Missing install or update first argument. Exiting..."; exit 1; }
      [[ "$MODE" != "install" && "$MODE" != "update" ]] && \
        { echo >&2 "--branch option only valid for install and update. Exiting..."; exit 1; }
      shift # past argument
      BRANCH="$1"
      shift # past value
      ;;

    # version arg
    --version)
      [[ -z "$MODE" ]] && { echo >&2 "Missing install or update first argument. Exiting..."; exit 1; }
      [[ "$MODE" != "install" && "$MODE" != "update" ]] && \
        { echo >&2 "--version option only valid for install and update. Exiting..."; exit 1; }
      shift # past argument
      VERSION="$1"
      shift # past value
      ;;

    # noninteractive arg
    --noninteractive)
      [[ -z "$MODE" ]] && { echo >&2 "Missing install first argument. Exiting..."; exit 1; }
      [[ "$MODE" != "install" ]] && { echo >&2 "--noninteractive option only valid for install. Exiting..."; exit 1; }
      NONINTERACTIVE=1
      shift # past argument
      ;;

    # app host arg
    --app-host)
      [[ -z "$MODE" ]] && { echo >&2 "Missing install first argument. Exiting..."; exit 1; }
      [[ "$MODE" != "install" ]] && { echo >&2 "--app-host option only valid for install. Exiting..."; exit 1; }
      shift # past argument
      APP_HOST="$1"
      shift # past value
      ;;

    # api host arg
    --api-host)
      [[ -z "$MODE" ]] && { echo >&2 "Missing install first argument. Exiting..."; exit 1; }
      [[ "$MODE" != "install" ]] && { echo >&2 "--api-host option only valid for install. Exiting..."; exit 1; }
      shift # past argument
      API_HOST="$1"
      shift # past value
      ;;

    # mesh host arg
    --mesh-host)
      [[ -z "$MODE" ]] && { echo >&2 "Missing install first argument. Exiting..."; exit 1; }
      [[ "$MODE" != "install" ]] && { echo >&2 "--mesh-host option only valid for install. Exiting..."; exit 1; }
      shift # past argument
      MESH_HOST="$1"
      shift # past value
      ;;

    # tactical user arg
    --tactical-user)
      [[ -z "$MODE" ]] && { echo >&2 "Missing install first argument. Exiting..."; exit 1; }
      [[ "$MODE" != "install" ]] && { echo >&2 "--tactical-user option only valid for install. Exiting..."; exit 1; }
      shift # past argument
      USERNAME="$1"
      shift # past value
      ;;

    # tactical password arg
    --tactical-password)
      [[ -z "$MODE" ]] && { echo >&2 "Missing install first argument. Exiting..."; exit 1; }
      [[ "$MODE" != "install" ]] && { echo >&2 "--tactical-password option only valid for install. Exiting..."; exit 1; }
      shift # past argument
      PASSWORD="$1"
      shift # past value
      ;;

    # email arg
    --email)
      [[ -z "$MODE" ]] && { echo >&2 "Missing install first argument. Exiting..."; exit 1; }
      [[ "$MODE" != "install" ]] && { echo >&2 "--email option only valid for install. Exiting..."; exit 1; }
      shift # past argument
      EMAIL="$1"
      shift # past value
      ;;

    # Unknown arg
    *)
      echo "Unknown argument ${1}. Exiting..."
      exit 1
      ;;
  esac
done
|
||||||
|
|
||||||
|
|
||||||
|
# for install mode
if [[ "$MODE" == "install" ]]; then
  echo "Starting installation in ${INSTALL_DIR}"

  # move to install dir
  mkdir -p "${INSTALL_DIR}"
  cd "$INSTALL_DIR"

  # pull docker-compose.yml file
  # bash is case sensitive: ${branch} was empty — the variable is BRANCH;
  # also save the download with -o (the original printed it to stdout and
  # never created docker-compose.yml)
  echo "Downloading docker-compose.yml from branch ${BRANCH}"
  COMPOSE_FILE="https://raw.githubusercontent.com/wh1te909/tacticalrmm/${BRANCH}/docker/docker-compose.yml"
  if ! curl -sS -o docker-compose.yml "${COMPOSE_FILE}"; then
    echo >&2 "Failed to download installation package ${COMPOSE_FILE}"
    exit 1
  fi

  # check if install is noninteractive
  if [[ -z "$NONINTERACTIVE" ]]; then
    # ask user for information not supplied as arguments
    ask_questions
  else
    echo "NonInteractive mode set."
    # check for required noninteractive arguments; any missing value is
    # fatal (the original mixed || and && so the exit could fire even
    # when every argument was present)
    if [[ -z "$API_HOST" || -z "$APP_HOST" || -z "$MESH_HOST" || -z "$EMAIL" || -z "$USERNAME" || -z "$PASSWORD" ]]; then
      echo "You must supply additional arguments for noninteractive install."
      exit 1
    fi
  fi

  # if certificates are available base64 encode them
  # LETS_ENCRYPT is the flag set by --use-lets-encrypt ($LET_ENCRYPT was
  # never assigned anywhere, so this branch could never run)
  if [[ -n "$LETS_ENCRYPT" ]] && [[ -z "$NONINTERACTIVE" ]]; then
    initiate_letsencrypt
    encode_certificates
  elif [[ -n "$CERT_PUB_FILE" ]] && [[ -n "$CERT_PRIV_FILE" ]]; then
    encode_certificates
  fi

  # generate config file (now runs on every install path; a missing 'fi'
  # previously trapped everything below inside the elif branch)
  generate_config

  # generate env file
  generate_env

  echo "Configuration complete. Starting environment."
  # start environment
  docker-compose pull
  docker-compose up -d
fi
|
||||||
|
|
||||||
|
# for update mode
if [[ "$MODE" == "update" ]]; then
  # TODO: honor a pinned --version; the original line
  # '[[ "$VERSION" != "latest" ]]' was a bare test whose result was
  # discarded, so it had no effect
  docker-compose pull
  docker-compose up -d
fi
|
||||||
|
|
||||||
|
# for update cert mode
if [[ "$MODE" == "update-cert" ]]; then
  # check for required parameters: either --use-lets-encrypt, or both
  # certificate files (the original ||/&& chain could exit even when the
  # lets-encrypt flag was set, and named a nonexistent --lets-encrypt
  # option in its message)
  if [[ -z "$LETS_ENCRYPT" && ( -z "$CERT_PUB_FILE" || -z "$CERT_PRIV_FILE" ) ]]; then
    echo >&2 "Provide the --use-lets-encrypt option or use --cert-pub-file and --cert-priv-file. Exiting..."
    exit 1
  fi

  if [[ -n "$LETS_ENCRYPT" ]]; then
    initiate_letsencrypt
    encode_certificates
    generate_env
  elif [[ -n "$CERT_PUB_FILE" ]] && [[ -n "$CERT_PRIV_FILE" ]]; then
    encode_certificates
    generate_env
  fi

  # restart regardless of which path supplied the certificates (a missing
  # 'fi' previously left the restart unreachable inside the elif branch)
  docker-compose restart
fi
|
||||||
|
|
||||||
|
# for backup mode
# placeholder: backup functionality has not been implemented yet
if [[ "$MODE" == "backup" ]]; then
  echo "backup not yet implemented"
fi
|
||||||
|
|
||||||
|
# for restore mode
# placeholder: restore functionality has not been implemented yet.
# the original read 'if [[ ... ]] then;' — a syntax error; the semicolon
# must precede 'then'
if [[ "$MODE" == "restore" ]]; then
  echo "restore not yet implemented"
fi
|
||||||
6
docs/package-lock.json
generated
6
docs/package-lock.json
generated
@@ -5421,9 +5421,9 @@
|
|||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"ini": {
|
"ini": {
|
||||||
"version": "1.3.5",
|
"version": "1.3.8",
|
||||||
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz",
|
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
|
||||||
"integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==",
|
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"internal-ip": {
|
"internal-ip": {
|
||||||
|
|||||||
17
go.mod
Normal file
17
go.mod
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
module github.com/wh1te909/tacticalrmm
|
||||||
|
|
||||||
|
go 1.15
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||||
|
github.com/go-resty/resty/v2 v2.3.0
|
||||||
|
github.com/josephspurrier/goversioninfo v1.2.0
|
||||||
|
github.com/kr/pretty v0.1.0 // indirect
|
||||||
|
github.com/nats-io/nats.go v1.10.1-0.20210107160453-a133396829fc
|
||||||
|
github.com/ugorji/go/codec v1.2.2
|
||||||
|
github.com/wh1te909/rmmagent v1.1.13-0.20210112033642-9b310c2c7f53
|
||||||
|
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102 // indirect
|
||||||
|
golang.org/x/sys v0.0.0-20201113233024-12cec1faf1ba // indirect
|
||||||
|
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
|
||||||
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 // indirect
|
||||||
|
)
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user