Compare commits
12 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
617738bb28 | ||
|
|
b63b2002a9 | ||
|
|
4d27f2b594 | ||
|
|
293f44c91a | ||
|
|
c2a9685480 | ||
|
|
cf0941cda9 | ||
|
|
1f77acdd22 | ||
|
|
192e418d08 | ||
|
|
4992002a28 | ||
|
|
29b04ee2f2 | ||
|
|
081ad3c30b | ||
|
|
3287b4a23b |
@@ -1,31 +0,0 @@
|
||||
COMPOSE_PROJECT_NAME=trmm
|
||||
|
||||
IMAGE_REPO=tacticalrmm/
|
||||
VERSION=latest
|
||||
|
||||
# tactical credentials (Used to login to dashboard)
|
||||
TRMM_USER=tactical
|
||||
TRMM_PASS=tactical
|
||||
|
||||
# dns settings
|
||||
APP_HOST=rmm.example.com
|
||||
API_HOST=api.example.com
|
||||
MESH_HOST=mesh.example.com
|
||||
|
||||
# mesh settings
|
||||
MESH_USER=tactical
|
||||
MESH_PASS=tactical
|
||||
MONGODB_USER=mongouser
|
||||
MONGODB_PASSWORD=mongopass
|
||||
|
||||
# database settings
|
||||
POSTGRES_USER=postgres
|
||||
POSTGRES_PASS=postgrespass
|
||||
|
||||
# DEV SETTINGS
|
||||
APP_PORT=443
|
||||
API_PORT=80
|
||||
HTTP_PROTOCOL=https
|
||||
DOCKER_NETWORK=172.21.0.0/24
|
||||
DOCKER_NGINX_IP=172.21.0.20
|
||||
NATS_PORTS=4222:4222
|
||||
@@ -1,38 +0,0 @@
|
||||
# pulls community scripts from git repo
|
||||
FROM python:3.11.8-slim AS GET_SCRIPTS_STAGE
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y --no-install-recommends git && \
|
||||
git clone https://github.com/amidaware/community-scripts.git /community-scripts
|
||||
|
||||
FROM python:3.11.8-slim
|
||||
|
||||
ENV TACTICAL_DIR /opt/tactical
|
||||
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
|
||||
ENV WORKSPACE_DIR /workspace
|
||||
ENV TACTICAL_USER tactical
|
||||
ENV VIRTUAL_ENV ${WORKSPACE_DIR}/api/tacticalrmm/env
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
|
||||
EXPOSE 8000 8383 8005
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y build-essential weasyprint
|
||||
|
||||
RUN groupadd -g 1000 tactical && \
|
||||
useradd -u 1000 -g 1000 tactical
|
||||
|
||||
# copy community scripts
|
||||
COPY --from=GET_SCRIPTS_STAGE /community-scripts /community-scripts
|
||||
|
||||
# Copy dev python reqs
|
||||
COPY .devcontainer/requirements.txt /
|
||||
|
||||
# Copy docker entrypoint.sh
|
||||
COPY .devcontainer/entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
||||
|
||||
WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm
|
||||
@@ -1,234 +0,0 @@
|
||||
version: '3.4'
|
||||
|
||||
services:
|
||||
api-dev:
|
||||
container_name: trmm-api-dev
|
||||
image: api-dev
|
||||
restart: always
|
||||
user: 1000:1000
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: .devcontainer/api.dockerfile
|
||||
command: [ "tactical-api" ]
|
||||
environment:
|
||||
API_PORT: ${API_PORT}
|
||||
ports:
|
||||
- "8000:${API_PORT}"
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-backend
|
||||
|
||||
# nats
|
||||
nats-dev:
|
||||
container_name: trmm-nats-dev
|
||||
image: ${IMAGE_REPO}tactical-nats:${VERSION}
|
||||
restart: always
|
||||
user: 1000:1000
|
||||
environment:
|
||||
API_HOST: ${API_HOST}
|
||||
API_PORT: ${API_PORT}
|
||||
DEV: 1
|
||||
ports:
|
||||
- "${NATS_PORTS}"
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- ${API_HOST}
|
||||
- tactical-nats
|
||||
|
||||
# meshcentral container
|
||||
meshcentral-dev:
|
||||
container_name: trmm-meshcentral-dev
|
||||
image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
|
||||
restart: always
|
||||
user: 1000:1000
|
||||
environment:
|
||||
MESH_HOST: ${MESH_HOST}
|
||||
MESH_USER: ${MESH_USER}
|
||||
MESH_PASS: ${MESH_PASS}
|
||||
MONGODB_USER: ${MONGODB_USER}
|
||||
MONGODB_PASSWORD: ${MONGODB_PASSWORD}
|
||||
NGINX_HOST_IP: ${DOCKER_NGINX_IP}
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-meshcentral
|
||||
- ${MESH_HOST}
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- mesh-data-dev:/home/node/app/meshcentral-data
|
||||
depends_on:
|
||||
- mongodb-dev
|
||||
|
||||
# mongodb container for meshcentral
|
||||
mongodb-dev:
|
||||
container_name: trmm-mongodb-dev
|
||||
image: mongo:4.4
|
||||
restart: always
|
||||
user: 1000:1000
|
||||
environment:
|
||||
MONGO_INITDB_ROOT_USERNAME: ${MONGODB_USER}
|
||||
MONGO_INITDB_ROOT_PASSWORD: ${MONGODB_PASSWORD}
|
||||
MONGO_INITDB_DATABASE: meshcentral
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-mongodb
|
||||
volumes:
|
||||
- mongo-dev-data:/data/db
|
||||
|
||||
# postgres database for api service
|
||||
postgres-dev:
|
||||
container_name: trmm-postgres-dev
|
||||
image: postgres:13-alpine
|
||||
restart: always
|
||||
environment:
|
||||
POSTGRES_DB: ${POSTGRES_DB}
|
||||
POSTGRES_USER: ${POSTGRES_USER}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASS}
|
||||
volumes:
|
||||
- postgres-data-dev:/var/lib/postgresql/data
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-postgres
|
||||
|
||||
# redis container for celery tasks
|
||||
redis-dev:
|
||||
container_name: trmm-redis-dev
|
||||
restart: always
|
||||
user: 1000:1000
|
||||
command: redis-server
|
||||
image: redis:6.0-alpine
|
||||
volumes:
|
||||
- redis-data-dev:/data
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-redis
|
||||
|
||||
init-dev:
|
||||
container_name: trmm-init-dev
|
||||
image: api-dev
|
||||
restart: on-failure
|
||||
command: [ "tactical-init-dev" ]
|
||||
environment:
|
||||
POSTGRES_USER: ${POSTGRES_USER}
|
||||
POSTGRES_PASS: ${POSTGRES_PASS}
|
||||
APP_HOST: ${APP_HOST}
|
||||
API_HOST: ${API_HOST}
|
||||
MESH_HOST: ${MESH_HOST}
|
||||
MESH_USER: ${MESH_USER}
|
||||
TRMM_USER: ${TRMM_USER}
|
||||
TRMM_PASS: ${TRMM_PASS}
|
||||
HTTP_PROTOCOL: ${HTTP_PROTOCOL}
|
||||
APP_PORT: ${APP_PORT}
|
||||
POSTGRES_DB: ${POSTGRES_DB}
|
||||
depends_on:
|
||||
- postgres-dev
|
||||
- meshcentral-dev
|
||||
networks:
|
||||
- dev
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- mesh-data-dev:/meshcentral-data
|
||||
- redis-data-dev:/redis/data
|
||||
- mongo-dev-data:/mongo/data/db
|
||||
- ..:/workspace:cached
|
||||
|
||||
# container for celery worker service
|
||||
celery-dev:
|
||||
container_name: trmm-celery-dev
|
||||
image: api-dev
|
||||
command: [ "tactical-celery-dev" ]
|
||||
restart: always
|
||||
user: 1000:1000
|
||||
networks:
|
||||
- dev
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
depends_on:
|
||||
- postgres-dev
|
||||
- redis-dev
|
||||
|
||||
# container for celery beat service
|
||||
celerybeat-dev:
|
||||
container_name: trmm-celerybeat-dev
|
||||
image: api-dev
|
||||
command: [ "tactical-celerybeat-dev" ]
|
||||
restart: always
|
||||
user: 1000:1000
|
||||
networks:
|
||||
- dev
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
depends_on:
|
||||
- postgres-dev
|
||||
- redis-dev
|
||||
|
||||
# container for websockets communication
|
||||
websockets-dev:
|
||||
container_name: trmm-websockets-dev
|
||||
image: api-dev
|
||||
command: [ "tactical-websockets-dev" ]
|
||||
restart: always
|
||||
user: 1000:1000
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-websockets
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
depends_on:
|
||||
- postgres-dev
|
||||
- redis-dev
|
||||
|
||||
# container for tactical reverse proxy
|
||||
nginx-dev:
|
||||
container_name: trmm-nginx-dev
|
||||
image: ${IMAGE_REPO}tactical-nginx:${VERSION}
|
||||
restart: always
|
||||
user: 1000:1000
|
||||
environment:
|
||||
APP_HOST: ${APP_HOST}
|
||||
API_HOST: ${API_HOST}
|
||||
MESH_HOST: ${MESH_HOST}
|
||||
CERT_PUB_KEY: ${CERT_PUB_KEY}
|
||||
CERT_PRIV_KEY: ${CERT_PRIV_KEY}
|
||||
APP_PORT: ${APP_PORT}
|
||||
API_PORT: ${API_PORT}
|
||||
DEV: 1
|
||||
networks:
|
||||
dev:
|
||||
ipv4_address: ${DOCKER_NGINX_IP}
|
||||
ports:
|
||||
- "80:8080"
|
||||
- "443:4443"
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
|
||||
volumes:
|
||||
tactical-data-dev: null
|
||||
postgres-data-dev: null
|
||||
mongo-dev-data: null
|
||||
mesh-data-dev: null
|
||||
redis-data-dev: null
|
||||
|
||||
networks:
|
||||
dev:
|
||||
driver: bridge
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: ${DOCKER_NETWORK}
|
||||
@@ -1,193 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
: "${TRMM_USER:=tactical}"
|
||||
: "${TRMM_PASS:=tactical}"
|
||||
: "${POSTGRES_HOST:=tactical-postgres}"
|
||||
: "${POSTGRES_PORT:=5432}"
|
||||
: "${POSTGRES_USER:=tactical}"
|
||||
: "${POSTGRES_PASS:=tactical}"
|
||||
: "${POSTGRES_DB:=tacticalrmm}"
|
||||
: "${MESH_SERVICE:=tactical-meshcentral}"
|
||||
: "${MESH_WS_URL:=ws://${MESH_SERVICE}:4443}"
|
||||
: "${MESH_USER:=meshcentral}"
|
||||
: "${MESH_PASS:=meshcentralpass}"
|
||||
: "${MESH_HOST:=tactical-meshcentral}"
|
||||
: "${API_HOST:=tactical-backend}"
|
||||
: "${REDIS_HOST:=tactical-redis}"
|
||||
: "${API_PORT:=8000}"
|
||||
|
||||
: "${CERT_PRIV_PATH:=${TACTICAL_DIR}/certs/privkey.pem}"
|
||||
: "${CERT_PUB_PATH:=${TACTICAL_DIR}/certs/fullchain.pem}"
|
||||
|
||||
# Add python venv to path
|
||||
export PATH="${VIRTUAL_ENV}/bin:$PATH"
|
||||
|
||||
function check_tactical_ready {
|
||||
sleep 15
|
||||
until [ -f "${TACTICAL_READY_FILE}" ]; do
|
||||
echo "waiting for init container to finish install or update..."
|
||||
sleep 10
|
||||
done
|
||||
}
|
||||
|
||||
function django_setup {
|
||||
until (echo >/dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &>/dev/null; do
|
||||
echo "waiting for postgresql container to be ready..."
|
||||
sleep 5
|
||||
done
|
||||
|
||||
until (echo >/dev/tcp/"${MESH_SERVICE}"/4443) &>/dev/null; do
|
||||
echo "waiting for meshcentral container to be ready..."
|
||||
sleep 5
|
||||
done
|
||||
|
||||
echo "setting up django environment"
|
||||
|
||||
# configure django settings
|
||||
MESH_TOKEN="$(cat ${TACTICAL_DIR}/tmp/mesh_token)"
|
||||
|
||||
DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
|
||||
|
||||
BASE_DOMAIN=$(echo "import tldextract; no_fetch_extract = tldextract.TLDExtract(suffix_list_urls=()); extracted = no_fetch_extract('${API_HOST}'); print(f'{extracted.domain}.{extracted.suffix}')" | python)
|
||||
|
||||
localvars="$(
|
||||
cat <<EOF
|
||||
SECRET_KEY = '${DJANGO_SEKRET}'
|
||||
|
||||
DEBUG = True
|
||||
|
||||
DOCKER_BUILD = True
|
||||
|
||||
SWAGGER_ENABLED = True
|
||||
|
||||
CERT_FILE = '${CERT_PUB_PATH}'
|
||||
KEY_FILE = '${CERT_PRIV_PATH}'
|
||||
|
||||
SCRIPTS_DIR = '/community-scripts'
|
||||
|
||||
ADMIN_URL = 'admin/'
|
||||
|
||||
ALLOWED_HOSTS = ['${API_HOST}', '${APP_HOST}', '*']
|
||||
|
||||
CORS_ORIGIN_WHITELIST = ['https://${APP_HOST}']
|
||||
|
||||
SESSION_COOKIE_DOMAIN = '${BASE_DOMAIN}'
|
||||
CSRF_COOKIE_DOMAIN = '${BASE_DOMAIN}'
|
||||
CSRF_TRUSTED_ORIGINS = ['https://${API_HOST}', 'https://${APP_HOST}']
|
||||
|
||||
HEADLESS_FRONTEND_URLS = {'socialaccount_login_error': 'https://${APP_HOST}/account/provider/callback'}
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.postgresql',
|
||||
'NAME': '${POSTGRES_DB}',
|
||||
'USER': '${POSTGRES_USER}',
|
||||
'PASSWORD': '${POSTGRES_PASS}',
|
||||
'HOST': '${POSTGRES_HOST}',
|
||||
'PORT': '${POSTGRES_PORT}',
|
||||
},
|
||||
'reporting': {
|
||||
'ENGINE': 'django.db.backends.postgresql',
|
||||
'NAME': '${POSTGRES_DB}',
|
||||
'USER': 'reporting_user',
|
||||
'PASSWORD': 'read_password',
|
||||
'HOST': '${POSTGRES_HOST}',
|
||||
'PORT': '${POSTGRES_PORT}',
|
||||
'OPTIONS': {
|
||||
'options': '-c default_transaction_read_only=on'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
MESH_USERNAME = '${MESH_USER}'
|
||||
MESH_SITE = 'https://${MESH_HOST}'
|
||||
MESH_TOKEN_KEY = '${MESH_TOKEN}'
|
||||
REDIS_HOST = '${REDIS_HOST}'
|
||||
MESH_WS_URL = '${MESH_WS_URL}'
|
||||
ADMIN_ENABLED = True
|
||||
TRMM_INSECURE = True
|
||||
EOF
|
||||
)"
|
||||
|
||||
echo "${localvars}" >${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
|
||||
|
||||
# run migrations and init scripts
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py pre_update_tasks
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py migrate --no-input
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py generate_json_schemas
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py collectstatic --no-input
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py initial_db_setup
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py initial_mesh_setup
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py load_chocos
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py load_community_scripts
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py reload_nats
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py create_natsapi_conf
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py create_installer_user
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py post_update_tasks
|
||||
|
||||
# create super user
|
||||
echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
|
||||
}
|
||||
|
||||
if [ "$1" = 'tactical-init-dev' ]; then
|
||||
|
||||
# make directories if they don't exist
|
||||
mkdir -p "${TACTICAL_DIR}/tmp"
|
||||
|
||||
test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"
|
||||
|
||||
mkdir -p /meshcentral-data
|
||||
mkdir -p ${TACTICAL_DIR}/tmp
|
||||
mkdir -p ${TACTICAL_DIR}/certs
|
||||
mkdir -p ${TACTICAL_DIR}/reporting
|
||||
mkdir -p ${TACTICAL_DIR}/reporting/assets
|
||||
mkdir -p /mongo/data/db
|
||||
mkdir -p /redis/data
|
||||
touch /meshcentral-data/.initialized && chown -R 1000:1000 /meshcentral-data
|
||||
touch ${TACTICAL_DIR}/tmp/.initialized && chown -R 1000:1000 ${TACTICAL_DIR}
|
||||
touch ${TACTICAL_DIR}/certs/.initialized && chown -R 1000:1000 ${TACTICAL_DIR}/certs
|
||||
touch /mongo/data/db/.initialized && chown -R 1000:1000 /mongo/data/db
|
||||
touch /redis/data/.initialized && chown -R 1000:1000 /redis/data
|
||||
touch ${TACTICAL_DIR}/reporting && chown -R 1000:1000 ${TACTICAL_DIR}/reporting
|
||||
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/exe
|
||||
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/log
|
||||
touch ${TACTICAL_DIR}/api/tacticalrmm/private/log/django_debug.log
|
||||
|
||||
# setup Python virtual env and install dependencies
|
||||
! test -e "${VIRTUAL_ENV}" && python -m venv ${VIRTUAL_ENV}
|
||||
"${VIRTUAL_ENV}"/bin/python -m pip install --upgrade pip
|
||||
"${VIRTUAL_ENV}"/bin/pip install --no-cache-dir setuptools wheel
|
||||
"${VIRTUAL_ENV}"/bin/pip install --no-cache-dir -r /requirements.txt
|
||||
|
||||
django_setup
|
||||
|
||||
# chown everything to tactical user
|
||||
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
|
||||
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${TACTICAL_DIR}"
|
||||
|
||||
# create install ready file
|
||||
su -c "echo 'tactical-init' > ${TACTICAL_READY_FILE}" "${TACTICAL_USER}"
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-api' ]; then
|
||||
check_tactical_ready
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py runserver 0.0.0.0:"${API_PORT}"
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-celery-dev' ]; then
|
||||
check_tactical_ready
|
||||
"${VIRTUAL_ENV}"/bin/celery -A tacticalrmm worker -l debug
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-celerybeat-dev' ]; then
|
||||
check_tactical_ready
|
||||
test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
|
||||
"${VIRTUAL_ENV}"/bin/celery -A tacticalrmm beat -l debug
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-websockets-dev' ]; then
|
||||
check_tactical_ready
|
||||
"${VIRTUAL_ENV}"/bin/daphne tacticalrmm.asgi:application --port 8383 -b 0.0.0.0
|
||||
fi
|
||||
@@ -1,3 +0,0 @@
|
||||
-r /workspace/api/tacticalrmm/requirements.txt
|
||||
-r /workspace/api/tacticalrmm/requirements-dev.txt
|
||||
-r /workspace/api/tacticalrmm/requirements-test.txt
|
||||
@@ -1,25 +0,0 @@
|
||||
**/__pycache__
|
||||
**/.classpath
|
||||
**/.dockerignore
|
||||
**/.env
|
||||
**/.git
|
||||
**/.gitignore
|
||||
**/.project
|
||||
**/.settings
|
||||
**/.toolstarget
|
||||
**/.vs
|
||||
**/.vscode
|
||||
**/*.*proj.user
|
||||
**/*.dbmdl
|
||||
**/*.jfm
|
||||
**/azds.yaml
|
||||
**/charts
|
||||
**/docker-compose*
|
||||
**/Dockerfile*
|
||||
**/node_modules
|
||||
**/npm-debug.log
|
||||
**/obj
|
||||
**/secrets.dev.yaml
|
||||
**/values.dev.yaml
|
||||
**/env
|
||||
README.md
|
||||
12
.github/FUNDING.yml
vendored
12
.github/FUNDING.yml
vendored
@@ -1,12 +0,0 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: amidaware
|
||||
patreon: # Replace with a single Patreon username
|
||||
open_collective: # Replace with a single Open Collective username
|
||||
ko_fi: # tacticalrmm
|
||||
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||
liberapay: # Replace with a single Liberapay username
|
||||
issuehunt: # Replace with a single IssueHunt username
|
||||
otechie: # Replace with a single Otechie username
|
||||
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
|
||||
41
.github/ISSUE_TEMPLATE/bug_report.md
vendored
41
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -1,41 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a bug report
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Server Info (please complete the following information):**
|
||||
- OS: [e.g. Ubuntu 20.04, Debian 10]
|
||||
- Browser: [e.g. chrome, safari]
|
||||
- RMM Version (as shown in top left of web UI):
|
||||
|
||||
**Installation Method:**
|
||||
- [ ] Standard
|
||||
- [ ] Standard with `--insecure` flag at install
|
||||
- [ ] Docker
|
||||
|
||||
**Agent Info (please complete the following information):**
|
||||
- Agent version (as shown in the 'Summary' tab of the agent from web UI):
|
||||
- Agent OS: [e.g. Win 10 v2004, Server 2016]
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
Steps to reproduce the behavior:
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Screenshots**
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -1,20 +0,0 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered**
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
||||
82
.github/workflows/ci-tests.yml
vendored
82
.github/workflows/ci-tests.yml
vendored
@@ -1,82 +0,0 @@
|
||||
name: Tests CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "*"
|
||||
pull_request:
|
||||
branches:
|
||||
- "*"
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
name: Tests
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.11.8"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: harmon758/postgresql-action@v1
|
||||
with:
|
||||
postgresql version: "15"
|
||||
postgresql db: "pipeline"
|
||||
postgresql user: "pipeline"
|
||||
postgresql password: "pipeline123456"
|
||||
|
||||
- name: Setup Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
|
||||
- name: Install redis
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt install -y redis
|
||||
redis-server --version
|
||||
|
||||
- name: Install requirements
|
||||
working-directory: api/tacticalrmm
|
||||
run: |
|
||||
python --version
|
||||
SETTINGS_FILE="tacticalrmm/settings.py"
|
||||
SETUPTOOLS_VER=$(grep "^SETUPTOOLS_VER" "$SETTINGS_FILE" | awk -F'[= "]' '{print $5}')
|
||||
WHEEL_VER=$(grep "^WHEEL_VER" "$SETTINGS_FILE" | awk -F'[= "]' '{print $5}')
|
||||
pip install --upgrade pip
|
||||
pip install setuptools==${SETUPTOOLS_VER} wheel==${WHEEL_VER}
|
||||
pip install -r requirements.txt -r requirements-test.txt
|
||||
|
||||
- name: Codestyle black
|
||||
working-directory: api
|
||||
run: |
|
||||
black --exclude migrations/ --check tacticalrmm
|
||||
if [ $? -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Lint with flake8
|
||||
working-directory: api/tacticalrmm
|
||||
run: |
|
||||
flake8 --config .flake8 .
|
||||
if [ $? -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Run django tests
|
||||
env:
|
||||
GHACTIONS: "yes"
|
||||
working-directory: api/tacticalrmm
|
||||
run: |
|
||||
pytest
|
||||
if [ $? -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- uses: codecov/codecov-action@v3
|
||||
with:
|
||||
directory: ./api/tacticalrmm
|
||||
files: ./api/tacticalrmm/coverage.xml
|
||||
verbose: true
|
||||
78
.github/workflows/docker-build-push.yml
vendored
78
.github/workflows/docker-build-push.yml
vendored
@@ -1,78 +0,0 @@
|
||||
name: Publish Tactical Docker Images
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v*.*.*"
|
||||
jobs:
|
||||
docker:
|
||||
name: Build and Push Docker Images
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Get Github Tag
|
||||
id: prep
|
||||
run: |
|
||||
echo ::set-output name=version::${GITHUB_REF#refs/tags/v}
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build and Push Tactical Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical:${{ steps.prep.outputs.version }},tacticalrmm/tactical:latest
|
||||
|
||||
- name: Build and Push Tactical MeshCentral Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical-meshcentral/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-meshcentral:${{ steps.prep.outputs.version }},tacticalrmm/tactical-meshcentral:latest
|
||||
|
||||
- name: Build and Push Tactical NATS Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical-nats/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest
|
||||
|
||||
- name: Build and Push Tactical Frontend Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical-frontend/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-frontend:${{ steps.prep.outputs.version }},tacticalrmm/tactical-frontend:latest
|
||||
|
||||
- name: Build and Push Tactical Nginx Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical-nginx/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-nginx:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nginx:latest
|
||||
18
.gitignore
vendored
18
.gitignore
vendored
@@ -34,7 +34,6 @@ app.ini
|
||||
create_services.py
|
||||
gen_random.py
|
||||
sync_salt_modules.py
|
||||
change_times.py
|
||||
rmm-*.exe
|
||||
rmm-*.ps1
|
||||
api/tacticalrmm/accounts/management/commands/*.json
|
||||
@@ -42,20 +41,3 @@ api/tacticalrmm/accounts/management/commands/random_data.py
|
||||
versioninfo.go
|
||||
resource.syso
|
||||
htmlcov/
|
||||
docker-compose.dev.yml
|
||||
docs/.vuepress/dist
|
||||
nats-rmm.conf
|
||||
.mypy_cache
|
||||
docs/site/
|
||||
reset_db.sh
|
||||
run_go_cmd.py
|
||||
nats-api.conf
|
||||
ignore/
|
||||
coverage.lcov
|
||||
daphne.sock.lock
|
||||
.pytest_cache
|
||||
coverage.xml
|
||||
setup_dev.yml
|
||||
11env/
|
||||
query_schema.json
|
||||
gunicorn_config.py
|
||||
43
.travis.yml
Normal file
43
.travis.yml
Normal file
@@ -0,0 +1,43 @@
|
||||
dist: focal
|
||||
|
||||
matrix:
|
||||
include:
|
||||
- language: node_js
|
||||
node_js: "12"
|
||||
before_install:
|
||||
- cd web
|
||||
install:
|
||||
- npm install
|
||||
script:
|
||||
- npm run test:unit
|
||||
|
||||
- language: python
|
||||
python: "3.8"
|
||||
services:
|
||||
- redis
|
||||
|
||||
addons:
|
||||
postgresql: "13"
|
||||
apt:
|
||||
packages:
|
||||
- postgresql-13
|
||||
|
||||
before_script:
|
||||
- psql -c 'CREATE DATABASE travisci;' -U postgres
|
||||
- psql -c "CREATE USER travisci WITH PASSWORD 'travisSuperSekret6645';" -U postgres
|
||||
- psql -c 'GRANT ALL PRIVILEGES ON DATABASE travisci TO travisci;' -U postgres
|
||||
- psql -c 'ALTER USER travisci CREATEDB;' -U postgres
|
||||
|
||||
before_install:
|
||||
- cd api/tacticalrmm
|
||||
|
||||
install:
|
||||
- pip install --no-cache-dir --upgrade pip
|
||||
- pip install --no-cache-dir setuptools==49.6.0 wheel==0.35.1
|
||||
- pip install --no-cache-dir -r requirements.txt -r requirements-test.txt
|
||||
|
||||
script:
|
||||
- coverage run manage.py test -v 2
|
||||
|
||||
after_success:
|
||||
- coveralls
|
||||
23
.vscode/extensions.json
vendored
23
.vscode/extensions.json
vendored
@@ -1,23 +0,0 @@
|
||||
{
|
||||
"recommendations": [
|
||||
// frontend
|
||||
"dbaeumer.vscode-eslint",
|
||||
"esbenp.prettier-vscode",
|
||||
"editorconfig.editorconfig",
|
||||
"vue.volar",
|
||||
"wayou.vscode-todo-highlight",
|
||||
|
||||
// python
|
||||
"matangover.mypy",
|
||||
"ms-python.python",
|
||||
|
||||
// golang
|
||||
"golang.go"
|
||||
],
|
||||
"unwantedRecommendations": [
|
||||
"octref.vetur",
|
||||
"hookyqr.beautify",
|
||||
"dbaeumer.jshint",
|
||||
"ms-vscode.vscode-typescript-tslint-plugin"
|
||||
]
|
||||
}
|
||||
14
.vscode/launch.json
vendored
14
.vscode/launch.json
vendored
@@ -14,20 +14,6 @@
|
||||
"0.0.0.0:8000"
|
||||
],
|
||||
"django": true
|
||||
},
|
||||
{
|
||||
"name": "Django: Docker Remote Attach",
|
||||
"type": "python",
|
||||
"request": "attach",
|
||||
"port": 5678,
|
||||
"host": "localhost",
|
||||
"preLaunchTask": "docker debug",
|
||||
"pathMappings": [
|
||||
{
|
||||
"localRoot": "${workspaceFolder}/api/tacticalrmm",
|
||||
"remoteRoot": "/workspace/api/tacticalrmm"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
99
.vscode/settings.json
vendored
99
.vscode/settings.json
vendored
@@ -1,59 +1,44 @@
|
||||
{
|
||||
"python.defaultInterpreterPath": "api/env/bin/python",
|
||||
"python.languageServer": "Pylance",
|
||||
"python.analysis.extraPaths": ["api/tacticalrmm", "api/env"],
|
||||
"python.analysis.diagnosticSeverityOverrides": {
|
||||
"reportUnusedImport": "error",
|
||||
"reportDuplicateImport": "error",
|
||||
"reportGeneralTypeIssues": "none",
|
||||
"reportOptionalMemberAccess": "none",
|
||||
},
|
||||
"python.analysis.typeCheckingMode": "basic",
|
||||
"editor.bracketPairColorization.enabled": true,
|
||||
"editor.guides.bracketPairs": true,
|
||||
"editor.formatOnSave": true,
|
||||
"files.associations": {
|
||||
"**/ansible/**/*.yml": "ansible",
|
||||
"**/docker/**/docker-compose*.yml": "dockercompose"
|
||||
},
|
||||
"files.watcherExclude": {
|
||||
"**/.git/objects/**": true,
|
||||
"**/.git/subtree-cache/**": true,
|
||||
"**/node_modules/": true,
|
||||
"/node_modules/**": true,
|
||||
"**/env/": true,
|
||||
"/env/**": true,
|
||||
"**/__pycache__": true,
|
||||
"/__pycache__/**": true,
|
||||
"**/.cache": true,
|
||||
"**/.eggs": true,
|
||||
"**/.ipynb_checkpoints": true,
|
||||
"**/.mypy_cache": true,
|
||||
"**/.pytest_cache": true,
|
||||
"**/*.egg-info": true,
|
||||
"**/*.feather": true,
|
||||
"**/*.parquet*": true,
|
||||
"**/*.pyc": true,
|
||||
"**/*.zip": true
|
||||
},
|
||||
"go.useLanguageServer": true,
|
||||
"[go]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "never"
|
||||
"python.pythonPath": "api/tacticalrmm/env/bin/python",
|
||||
"python.languageServer": "Pylance",
|
||||
"python.analysis.extraPaths": [
|
||||
"api/tacticalrmm"
|
||||
],
|
||||
"python.analysis.typeCheckingMode": "basic",
|
||||
"python.formatting.provider": "black",
|
||||
"editor.formatOnSave": true,
|
||||
"vetur.format.defaultFormatter.js": "prettier",
|
||||
"vetur.format.defaultFormatterOptions": {
|
||||
"prettier": {
|
||||
"semi": true,
|
||||
"printWidth": 120,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"arrowParens": "avoid",
|
||||
}
|
||||
},
|
||||
"editor.snippetSuggestions": "none"
|
||||
},
|
||||
"[go.mod]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit"
|
||||
}
|
||||
},
|
||||
"gopls": {
|
||||
"usePlaceholders": true,
|
||||
"completeUnimported": true,
|
||||
"staticcheck": true
|
||||
},
|
||||
"[python]": {
|
||||
"editor.defaultFormatter": "ms-python.black-formatter"
|
||||
}
|
||||
}
|
||||
"vetur.format.options.tabSize": 2,
|
||||
"vetur.format.options.useTabs": false,
|
||||
"files.watcherExclude": {
|
||||
"files.watcherExclude": {
|
||||
"**/.git/objects/**": true,
|
||||
"**/.git/subtree-cache/**": true,
|
||||
"**/node_modules/": true,
|
||||
"/node_modules/**": true,
|
||||
"**/env/": true,
|
||||
"/env/**": true,
|
||||
"**/__pycache__": true,
|
||||
"/__pycache__/**": true,
|
||||
"**/.cache": true,
|
||||
"**/.eggs": true,
|
||||
"**/.ipynb_checkpoints": true,
|
||||
"**/.mypy_cache": true,
|
||||
"**/.pytest_cache": true,
|
||||
"**/*.egg-info": true,
|
||||
"**/*.feather": true,
|
||||
"**/*.parquet*": true,
|
||||
"**/*.pyc": true,
|
||||
"**/*.zip": true
|
||||
},
|
||||
},
|
||||
}
|
||||
21
LICENSE
Normal file
21
LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2019-present wh1te909
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
74
LICENSE.md
74
LICENSE.md
@@ -1,74 +0,0 @@
|
||||
### Tactical RMM License Version 1.0
|
||||
|
||||
Text of license:   Copyright © 2022 AmidaWare LLC. All rights reserved.<br>
|
||||
          Amending the text of this license is not permitted.
|
||||
|
||||
Trade Mark:    "Tactical RMM" is a trade mark of AmidaWare LLC.
|
||||
|
||||
Licensor:      AmidaWare LLC of 1968 S Coast Hwy PMB 3847 Laguna Beach, CA, USA.
|
||||
|
||||
Licensed Software:  The software known as Tactical RMM Version v0.12.0 (and all subsequent releases and versions) and the Tactical RMM Agent v2.0.0 (and all subsequent releases and versions).
|
||||
|
||||
### 1. Preamble
|
||||
The Licensed Software is designed to facilitate the remote monitoring and management (RMM) of networks, systems, servers, computers and other devices. The Licensed Software is made available primarily for use by organisations and managed service providers for monitoring and management purposes.
|
||||
|
||||
The Tactical RMM License is not an open-source software license. This license contains certain restrictions on the use of the Licensed Software. For example the functionality of the Licensed Software may not be made available as part of a SaaS (Software-as-a-Service) service or product to provide a commercial or for-profit service without the express prior permission of the Licensor.
|
||||
|
||||
### 2. License Grant
|
||||
Permission is hereby granted, free of charge, on a non-exclusive basis, to copy, modify, create derivative works and use the Licensed Software in source and binary forms subject to the following terms and conditions. No additional rights will be implied under this license.
|
||||
|
||||
* The hosting and use of the Licensed Software to monitor and manage in-house networks/systems and/or customer networks/systems is permitted.
|
||||
|
||||
This license does not allow the functionality of the Licensed Software (whether in whole or in part) or a modified version of the Licensed Software or a derivative work to be used or otherwise made available as part of any other commercial or for-profit service, including, without limitation, any of the following:
|
||||
* a service allowing third parties to interact remotely through a computer network;
|
||||
* as part of a SaaS service or product;
|
||||
* as part of the provision of a managed hosting service or product;
|
||||
* the offering of installation and/or configuration services;
|
||||
* the offer for sale, distribution or sale of any service or product (whether or not branded as Tactical RMM).
|
||||
|
||||
The prior written approval of AmidaWare LLC must be obtained for all commercial use and/or for-profit service use of the (i) Licensed Software (whether in whole or in part), (ii) a modified version of the Licensed Software and/or (iii) a derivative work.
|
||||
|
||||
The terms of this license apply to all copies of the Licensed Software (including modified versions) and derivative works.
|
||||
|
||||
All use of the Licensed Software must immediately cease if use breaches the terms of this license.
|
||||
|
||||
### 3. Derivative Works
|
||||
If a derivative work is created which is based on or otherwise incorporates all or any part of the Licensed Software, and the derivative work is made available to any other person, the complete corresponding machine readable source code (including all changes made to the Licensed Software) must accompany the derivative work and be made publicly available online.
|
||||
|
||||
### 4. Copyright Notice
|
||||
The following copyright notice shall be included in all copies of the Licensed Software:
|
||||
|
||||
   Copyright © 2022 AmidaWare LLC.
|
||||
|
||||
   Licensed under the Tactical RMM License Version 1.0 (the “License”).<br>
|
||||
   You may only use the Licensed Software in accordance with the License.<br>
|
||||
   A copy of the License is available at: https://license.tacticalrmm.com
|
||||
|
||||
### 5. Disclaimer of Warranty
|
||||
THE LICENSED SOFTWARE IS PROVIDED "AS IS". TO THE FULLEST EXTENT PERMISSIBLE AT LAW ALL CONDITIONS, WARRANTIES OR OTHER TERMS OF ANY KIND WHICH MIGHT HAVE EFFECT OR BE IMPLIED OR INCORPORATED, WHETHER BY STATUTE, COMMON LAW OR OTHERWISE ARE HEREBY EXCLUDED, INCLUDING THE CONDITIONS, WARRANTIES OR OTHER TERMS AS TO SATISFACTORY QUALITY AND/OR MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, THE USE OF REASONABLE SKILL AND CARE AND NON-INFRINGEMENT.
|
||||
|
||||
### 6. Limits of Liability
|
||||
THE FOLLOWING EXCLUSIONS SHALL APPLY TO THE FULLEST EXTENT PERMISSIBLE AT LAW. NEITHER THE AUTHORS NOR THE COPYRIGHT HOLDERS SHALL IN ANY CIRCUMSTANCES HAVE ANY LIABILITY FOR ANY CLAIM, LOSSES, DAMAGES OR OTHER LIABILITY, WHETHER THE SAME ARE SUFFERED DIRECTLY OR INDIRECTLY OR ARE IMMEDIATE OR CONSEQUENTIAL, AND WHETHER THE SAME ARISE IN CONTRACT, TORT OR DELICT (INCLUDING NEGLIGENCE) OR OTHERWISE HOWSOEVER ARISING FROM, OUT OF OR IN CONNECTION WITH THE LICENSED SOFTWARE OR THE USE OR INABILITY TO USE THE LICENSED SOFTWARE OR OTHER DEALINGS IN THE LICENSED SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH LOSS OR DAMAGE. THE FOREGOING EXCLUSIONS SHALL INCLUDE, WITHOUT LIMITATION, LIABILITY FOR ANY LOSSES OR DAMAGES WHICH FALL WITHIN ANY OF THE FOLLOWING CATEGORIES: SPECIAL, EXEMPLARY, OR INCIDENTAL LOSS OR DAMAGE, LOSS OF PROFITS, LOSS OF ANTICIPATED SAVINGS, LOSS OF BUSINESS OPPORTUNITY, LOSS OF GOODWILL, AND LOSS OR CORRUPTION OF DATA.
|
||||
|
||||
### 7. Termination
|
||||
This license shall terminate with immediate effect if there is a material breach of any of its terms.
|
||||
|
||||
### 8. No partnership, agency or joint venture
|
||||
Nothing in this license agreement is intended to, or shall be deemed to, establish any partnership or joint venture or any relationship of agency between AmidaWare LLC and any other person.
|
||||
|
||||
### 9. No endorsement
|
||||
The names of the authors and/or the copyright holders must not be used to promote or endorse any products or services which are in any way derived from the Licensed Software without prior written consent.
|
||||
|
||||
### 10. Trademarks
|
||||
No permission is granted to use the trademark “Tactical RMM” or any other trade name, trademark, service mark or product name of AmidaWare LLC except to the extent necessary to comply with the notice requirements in Section 4 (Copyright Notice).
|
||||
|
||||
### 11. Entire agreement
|
||||
This license contains the whole agreement relating to its subject matter.
|
||||
|
||||
|
||||
|
||||
### 12. Severance
|
||||
If any provision or part-provision of this license is or becomes invalid, illegal or unenforceable, it shall be deemed deleted, but that shall not affect the validity and enforceability of the rest of this license.
|
||||
|
||||
### 13. Acceptance of these terms
|
||||
The terms and conditions of this license are accepted by copying, downloading, installing, redistributing, or otherwise using the Licensed Software.
|
||||
135
README.md
135
README.md
@@ -1,53 +1,142 @@
|
||||
# Tactical RMM
|
||||
|
||||

|
||||
[](https://codecov.io/gh/amidaware/tacticalrmm)
|
||||
[](https://travis-ci.com/wh1te909/tacticalrmm)
|
||||
[](https://dev.azure.com/dcparsi/Tactical%20RMM/_build/latest?definitionId=4&branchName=develop)
|
||||
[](https://coveralls.io/github/wh1te909/tacticalrmm?branch=develop)
|
||||
[](https://opensource.org/licenses/MIT)
|
||||
[](https://github.com/python/black)
|
||||
|
||||
Tactical RMM is a remote monitoring & management tool, built with Django and Vue.\
|
||||
It uses an [agent](https://github.com/amidaware/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
|
||||
Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
|
||||
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang, as well as the [SaltStack](https://github.com/saltstack/salt) api and [MeshCentral](https://github.com/Ylianst/MeshCentral)
|
||||
|
||||
# [LIVE DEMO](https://demo.tacticalrmm.com/)
|
||||
# [LIVE DEMO](https://rmm.xlawgaming.com/)
|
||||
Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
|
||||
|
||||
Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.
|
||||
*Tactical RMM is currently in alpha and subject to breaking changes. Use in production at your own risk.*
|
||||
|
||||
### [Discord Chat](https://discord.gg/upGTkWp)
|
||||
|
||||
### [Documentation](https://docs.tacticalrmm.com)
|
||||
|
||||
## Features
|
||||
|
||||
- Teamviewer-like remote desktop control
|
||||
- Real-time remote shell
|
||||
- Remote file browser (download and upload files)
|
||||
- Remote command and script execution (batch, powershell, python, nushell and deno scripts)
|
||||
- Remote command and script execution (batch, powershell and python scripts)
|
||||
- Event log viewer
|
||||
- Services management
|
||||
- Windows patch management
|
||||
- Automated checks with email/SMS/Webhook alerting (cpu, disk, memory, services, scripts, event logs)
|
||||
- Automated checks with email/SMS alerting (cpu, disk, memory, services, scripts, event logs)
|
||||
- Automated task runner (run scripts on a schedule)
|
||||
- Remote software installation via chocolatey
|
||||
- Software and hardware inventory
|
||||
|
||||
## Windows agent versions supported
|
||||
## Windows versions supported
|
||||
|
||||
- Windows 7, 8.1, 10, 11, Server 2008R2, 2012R2, 2016, 2019, 2022
|
||||
- Windows 7, 8.1, 10, Server 2008R2, 2012R2, 2016, 2019
|
||||
|
||||
## Linux agent versions supported
|
||||
## Installation
|
||||
|
||||
- Any distro with systemd which includes but is not limited to: Debian (10, 11), Ubuntu x86_64 (18.04, 20.04, 22.04), Synology 7, centos, freepbx and more!
|
||||
### Requirements
|
||||
- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04)
|
||||
- A domain you own with at least 3 subdomains
|
||||
- Google Authenticator app (2 factor is NOT optional)
|
||||
|
||||
## Mac agent versions supported
|
||||
### Docker
|
||||
Refer to the [docker setup](docker/readme.md)
|
||||
|
||||
- 64 bit Intel and Apple Silicon (M-Series)
|
||||
|
||||
## Sponsorship Features
|
||||
### Installation example (Ubuntu server 20.04 LTS)
|
||||
|
||||
- Mac and Linux Agents
|
||||
- Windows [Code Signed](https://docs.tacticalrmm.com/code_signing/) Agents
|
||||
- Fully Customizable [Reporting](https://docs.tacticalrmm.com/ee/reporting/reporting_overview/) Module
|
||||
- [Single Sign-On](https://docs.tacticalrmm.com/ee/sso/sso/) (SSO)
|
||||
Fresh VPS with latest updates\
|
||||
login as root and create a user and add to sudoers group (we will be creating a user called tactical)
|
||||
```
|
||||
apt update && apt -y upgrade
|
||||
adduser tactical
|
||||
usermod -a -G sudo tactical
|
||||
```
|
||||
|
||||
## Installation / Backup / Restore / Usage
|
||||
switch to the tactical user and setup the firewall
|
||||
```
|
||||
su - tactical
|
||||
sudo ufw default deny incoming
|
||||
sudo ufw default allow outgoing
|
||||
sudo ufw allow ssh
|
||||
sudo ufw allow http
|
||||
sudo ufw allow https
|
||||
sudo ufw allow proto tcp from any to any port 4505,4506
|
||||
sudo ufw enable && sudo ufw reload
|
||||
```
|
||||
|
||||
### Refer to the [documentation](https://docs.tacticalrmm.com)
|
||||
Our domain for this example is tacticalrmm.com
|
||||
|
||||
In the DNS manager of wherever our domain is hosted, we will create three A records, all pointing to the public IP address of our VPS
|
||||
|
||||
Create A record ```api.tacticalrmm.com``` for the django rest backend\
|
||||
Create A record ```rmm.tacticalrmm.com``` for the vue frontend\
|
||||
Create A record ```mesh.tacticalrmm.com``` for meshcentral
|
||||
|
||||
Download the install script and run it
|
||||
|
||||
```
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/install.sh
|
||||
chmod +x install.sh
|
||||
./install.sh
|
||||
```
|
||||
|
||||
Links will be provided at the end of the install script.\
|
||||
Download the executable from the first link, then open ```rmm.tacticalrmm.com``` and login.\
|
||||
Upload the executable when prompted during the initial setup page.
|
||||
|
||||
|
||||
### Install an agent
|
||||
From the app's dashboard, choose Agents > Install Agent to generate an installer.
|
||||
|
||||
## Updating
|
||||
Download and run [update.sh](./update.sh) ([Raw](https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/update.sh))
|
||||
```
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/update.sh
|
||||
chmod +x update.sh
|
||||
./update.sh
|
||||
```
|
||||
|
||||
## Backup
|
||||
Download [backup.sh](./backup.sh) ([Raw](https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh))
|
||||
```
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh
|
||||
```
|
||||
Change the postgres username and password at the top of the file (you can find them in `/rmm/api/tacticalrmm/tacticalrmm/local_settings.py` under the DATABASES section)
|
||||
|
||||
Run it
|
||||
```
|
||||
chmod +x backup.sh
|
||||
./backup.sh
|
||||
```
|
||||
|
||||
## Restore
|
||||
Change your 3 A records to point to new server's public IP
|
||||
|
||||
Create same linux user account as old server and add to sudoers group and setup firewall (see install instructions above)
|
||||
|
||||
Copy backup file to new server
|
||||
|
||||
Download the restore script, and edit the postgres username/password at the top of the file. Same instructions as above in the backup steps.
|
||||
```
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/restore.sh
|
||||
```
|
||||
|
||||
Run the restore script, passing it the backup tar file as the first argument
|
||||
```
|
||||
chmod +x restore.sh
|
||||
./restore.sh rmm-backup-xxxxxxx.tar
|
||||
```
|
||||
|
||||
## Using another ssl certificate
|
||||
During the install you can opt out of using the Let's Encrypt certificate. If you do this the script will create a self-signed certificate, so that https continues to work. You can replace the certificates in /certs/example.com/(privkey.pem | pubkey.pem) with your own.
|
||||
|
||||
If you are migrating from Let's Encrypt to another certificate provider, you can create the /certs directory and copy your certificates there. It is recommended to do this because this directory will be backed up with the backup script provided. Then modify the nginx configurations to use your new certificates
|
||||
|
||||
The cert that is generated is a wildcard certificate and is used in the nginx configurations: rmm.conf, api.conf, and mesh.conf. If you can't generate wildcard certificates you can create a cert for each subdomain and configure each nginx configuration file to use its own certificate. Then restart nginx:
|
||||
|
||||
```
|
||||
sudo systemctl restart nginx
|
||||
```
|
||||
@@ -1,9 +0,0 @@
|
||||
# Security Policy
|
||||
|
||||
## Supported Versions
|
||||
|
||||
[Latest](https://github.com/amidaware/tacticalrmm/releases/latest) release
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
https://docs.tacticalrmm.com/security
|
||||
457
_modules/win_agent.py
Normal file
457
_modules/win_agent.py
Normal file
@@ -0,0 +1,457 @@
|
||||
from __future__ import absolute_import
|
||||
import psutil
|
||||
import os
|
||||
import datetime
|
||||
import zlib
|
||||
import json
|
||||
import base64
|
||||
import wmi
|
||||
import win32evtlog
|
||||
import win32con
|
||||
import win32evtlogutil
|
||||
import winerror
|
||||
from time import sleep
|
||||
import requests
|
||||
import subprocess
|
||||
import random
|
||||
import platform
|
||||
|
||||
# "64" on 64-bit windows, "32" otherwise; selects the matching nssm binary
ARCH = "64" if platform.machine().endswith("64") else "32"
# install dir of the tactical agent, e.g. C:\Program Files\TacticalAgent
PROGRAM_DIR = os.path.join(os.environ["ProgramFiles"], "TacticalAgent")
# main agent executable; several functions below shell out to it
TAC_RMM = os.path.join(PROGRAM_DIR, "tacticalrmm.exe")
# service manager shipped with the agent (arch-specific build)
NSSM = os.path.join(PROGRAM_DIR, "nssm.exe" if ARCH == "64" else "nssm-x86.exe")
# scratch dir for downloaded scripts and installers
TEMP_DIR = os.path.join(os.environ["WINDIR"], "Temp")
SYS_DRIVE = os.environ["SystemDrive"]
# python interpreter bundled with the salt minion install
PY_BIN = os.path.join(SYS_DRIVE, "\\salt", "bin", "python.exe")
# salt-call wrapper used to re-invoke functions of this module locally
SALT_CALL = os.path.join(SYS_DRIVE, "\\salt", "salt-call.bat")
|
||||
|
||||
|
||||
def get_services():
    """Return a list of dicts describing every windows service.

    See https://github.com/wh1te909/tacticalrmm/issues/38 for why the
    svc.as_dict() method of psutil is re-implemented manually here.
    Services whose details cannot be read are skipped entirely.
    """
    fields = (
        "display_name",
        "binpath",
        "username",
        "start_type",
        "status",
        "pid",
        "name",
        "description",
    )
    services = []
    for svc in psutil.win_service_iter():
        try:
            info = {field: getattr(svc, field)() for field in fields}
        except Exception:
            # any unreadable field disqualifies the whole service
            continue
        services.append(info)

    return services
|
||||
|
||||
|
||||
def run_python_script(filename, timeout, script_type="userdefined"):
    """Fetch a python script from the salt fileserver and execute it.

    No longer used in agent version 0.11.0; kept for older agents.

    Args:
        filename: script name on the salt fileserver.
        timeout: max seconds the script may run.
        script_type: "userdefined" pulls from scripts/userdefined/,
            anything else pulls from scripts/.

    Returns:
        The dict produced by salt's cmd.run_all (stdout/stderr/retcode/pid).
    """
    file_path = os.path.join(TEMP_DIR, filename)

    # remove a stale copy from a previous run; best-effort
    if os.path.exists(file_path):
        try:
            os.remove(file_path)
        except OSError:
            pass

    if script_type == "userdefined":
        __salt__["cp.get_file"](f"salt://scripts/userdefined/{filename}", file_path)
    else:
        __salt__["cp.get_file"](f"salt://scripts/{filename}", file_path)

    return __salt__["cmd.run_all"](f"{PY_BIN} {file_path}", timeout=timeout)
|
||||
|
||||
|
||||
def run_script(filepath, filename, shell, timeout, args=None, bg=False):
    """Run a script on the agent via salt.

    Args:
        filepath: salt:// (or local) source of the script.
        filename: name used for the local temp copy (python scripts only).
        shell: "powershell", "cmd" or "python"; anything else returns None.
        timeout: max seconds the script may run.
        args: optional list of arguments; each is double-quoted.
        bg: run in the background (fire and forget) instead of waiting.

    Returns:
        The dict from salt's cmd.script / cmd.run_all, the pid info from
        cmd.run_bg when bg is True, or None for an unknown shell.
    """
    # NOTE: default changed from a mutable `args=[]` — shared mutable
    # defaults are an anti-pattern; behavior for callers is unchanged.
    args = args or []
    quoted_args = " ".join(f'"{a}"' for a in args)

    if shell == "powershell" or shell == "cmd":
        if args:
            return __salt__["cmd.script"](
                source=filepath,
                args=quoted_args,
                shell=shell,
                timeout=timeout,
                bg=bg,
            )
        else:
            return __salt__["cmd.script"](
                source=filepath, shell=shell, timeout=timeout, bg=bg
            )

    elif shell == "python":
        file_path = os.path.join(TEMP_DIR, filename)

        # remove a stale copy from a previous run; best-effort
        if os.path.exists(file_path):
            try:
                os.remove(file_path)
            except OSError:
                pass

        __salt__["cp.get_file"](filepath, file_path)

        salt_cmd = "cmd.run_bg" if bg else "cmd.run_all"

        if args:
            cmd = f"{PY_BIN} {file_path} {quoted_args}"
            return __salt__[salt_cmd](cmd, timeout=timeout)
        else:
            return __salt__[salt_cmd](f"{PY_BIN} {file_path}", timeout=timeout)
|
||||
|
||||
|
||||
def uninstall_agent():
    """Launch the InnoSetup uninstaller in the background (fire and forget)."""
    uninstaller = os.path.join(PROGRAM_DIR, "unins000.exe")
    __salt__["cmd.run_bg"]([uninstaller, "/VERYSILENT", "/SUPPRESSMSGBOXES"])
    return "ok"
|
||||
|
||||
|
||||
def update_salt():
    """Kick off a detached salt update via the tactical agent binary.

    Returns "running" if an update is already in progress, otherwise the
    pid of the spawned updater process.
    """
    # bail out if the agent is already running an update
    for proc in psutil.process_iter():
        with proc.oneshot():
            if proc.name() == "tacticalrmm.exe" and "updatesalt" in proc.cmdline():
                return "running"

    from subprocess import PIPE, Popen

    # windows creation flags: fully detach the child so it survives the
    # salt minion restarting underneath it
    CREATE_NEW_PROCESS_GROUP = 0x00000200
    DETACHED_PROCESS = 0x00000008

    updater = Popen(
        [TAC_RMM, "-m", "updatesalt"],
        stdin=PIPE,
        stdout=PIPE,
        stderr=PIPE,
        close_fds=True,
        creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
    )
    return updater.pid
|
||||
|
||||
|
||||
def run_manual_checks():
    """Trigger an immediate check run via the agent binary (background)."""
    cmd = [TAC_RMM, "-m", "runchecks"]
    __salt__["cmd.run_bg"](cmd)
    return "ok"
|
||||
|
||||
|
||||
def install_updates():
    """Start the windows update installer unless one is already running.

    Returns "running" when an installer process is found, otherwise the
    result of launching it in the background.
    """
    for proc in psutil.process_iter():
        with proc.oneshot():
            if proc.name() == "tacticalrmm.exe" and "winupdater" in proc.cmdline():
                return "running"

    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "winupdater"])
|
||||
|
||||
|
||||
def _wait_for_service(svc, status, retries=10):
    """Poll a windows service until it reaches *status* or retries run out.

    Polls every 5 seconds. Returns the last observed status string, or
    "fail" when the service could not be found on the final attempt.
    """
    failed_attempts = 0
    while True:
        try:
            service = psutil.win_service_get(svc)
        except psutil.NoSuchProcess:
            stat = "fail"
            failed_attempts += 1
            sleep(5)
        else:
            stat = service.status()
            if stat == status:
                # reached the desired state; counter of 0 exits the loop
                failed_attempts = 0
            else:
                failed_attempts += 1
                sleep(5)

        if failed_attempts == 0 or failed_attempts > retries:
            break

    return stat
|
||||
|
||||
|
||||
def agent_update_v2(inno, url):
    """Download and silently install a new agent, then ensure services run.

    Args:
        inno: filename of the InnoSetup installer (saved under TEMP_DIR).
        url: download URL for the installer.

    Returns:
        "already running" if another update is in progress, "failed" on
        download errors, otherwise "ok".
    """
    # make sure another instance of the update is not running:
    # this function spawns 2 instances of itself (because we call it twice
    # with salt run_bg), so if more than 2 are running an update is
    # already in progress
    count = 0
    for p in psutil.process_iter():
        try:
            with p.oneshot():
                if "win_agent.agent_update_v2" in p.cmdline():
                    count += 1
        except Exception:
            continue

    if count > 2:
        return "already running"

    sleep(random.randint(1, 20))  # stagger agents so we don't flood the rmm

    exe = os.path.join(TEMP_DIR, inno)

    # remove a stale installer from a previous attempt; best-effort
    if os.path.exists(exe):
        try:
            os.remove(exe)
        except OSError:
            pass

    try:
        r = requests.get(url, stream=True, timeout=600)
    except Exception:
        return "failed"

    if r.status_code != 200:
        return "failed"

    with open(exe, "wb") as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    del r

    subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=120)

    # the installer should leave both services running; restart any that
    # didn't come back on their own
    tac = _wait_for_service(svc="tacticalagent", status="running")
    if tac != "running":
        subprocess.run([NSSM, "start", "tacticalagent"], timeout=30)

    chk = _wait_for_service(svc="checkrunner", status="running")
    if chk != "running":
        subprocess.run([NSSM, "start", "checkrunner"], timeout=30)

    return "ok"
|
||||
|
||||
|
||||
def do_agent_update_v2(inno, url):
    """Fire off agent_update_v2 in the background via a local salt-call."""
    cmd = [
        SALT_CALL,
        "win_agent.agent_update_v2",
        f"inno={inno}",
        f"url={url}",
        "--local",
    ]
    return __salt__["cmd.run_bg"](cmd)
|
||||
|
||||
|
||||
def agent_update(version, url):
    """Legacy agent updater: download, stop services, install, restart.

    Args:
        version: agent version string, used to name the downloaded exe.
        url: download URL for the installer.

    Returns:
        "already running" if another update is in progress, "failed" on
        download errors, otherwise "ok".
    """
    # make sure another instance of the update is not running:
    # this function spawns 2 instances of itself, so if more than 2 are
    # running don't continue as an update is already running
    count = 0
    for p in psutil.process_iter():
        try:
            with p.oneshot():
                if "win_agent.agent_update" in p.cmdline():
                    count += 1
        except Exception:
            continue

    if count > 2:
        return "already running"

    sleep(random.randint(1, 60))  # stagger agents so we don't flood the rmm
    try:
        r = requests.get(url, stream=True, timeout=600)
    except Exception:
        return "failed"

    if r.status_code != 200:
        return "failed"

    exe = os.path.join(TEMP_DIR, f"winagent-v{version}.exe")

    with open(exe, "wb") as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    del r

    services = ("tacticalagent", "checkrunner")

    for svc in services:
        subprocess.run([NSSM, "stop", svc], timeout=120)

    sleep(10)
    # return value of the installer is intentionally ignored; service
    # status after the sleep is what matters
    subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=300)
    sleep(30)

    for svc in services:
        subprocess.run([NSSM, "start", svc], timeout=120)

    return "ok"
|
||||
|
||||
|
||||
def do_agent_update(version, url):
    """Fire off agent_update in the background via a local salt-call."""
    cmd = [
        SALT_CALL,
        "win_agent.agent_update",
        f"version={version}",
        f"url={url}",
        "--local",
    ]
    return __salt__["cmd.run_bg"](cmd)
|
||||
|
||||
|
||||
class SystemDetail:
    """Snapshot of WMI hardware/OS tables, queried once at construction.

    Each attribute holds the full result list of one Win32_* WMI class;
    get_all() converts such a result list into plain python structures.
    """

    def __init__(self):
        self.c = wmi.WMI()
        self.comp_sys_prod = self.c.Win32_ComputerSystemProduct()
        self.comp_sys = self.c.Win32_ComputerSystem()
        self.memory = self.c.Win32_PhysicalMemory()
        self.os = self.c.Win32_OperatingSystem()
        self.base_board = self.c.Win32_BaseBoard()
        self.bios = self.c.Win32_BIOS()
        self.disk = self.c.Win32_DiskDrive()
        self.network_adapter = self.c.Win32_NetworkAdapter()
        self.network_config = self.c.Win32_NetworkAdapterConfiguration()
        self.desktop_monitor = self.c.Win32_DesktopMonitor()
        self.cpu = self.c.Win32_Processor()
        self.usb = self.c.Win32_USBController()

    def get_all(self, obj):
        """Return *obj* (a WMI result list) as a list of lists of
        single-key {property: value} dicts, skipping None-valued properties.
        """
        ret = []
        for instance in obj:
            props = []
            for name in list(instance.properties):
                # fetch each WMI property exactly once: the original code
                # called getattr twice per property (once in the filter,
                # once for the value), doubling the COM round-trips
                value = getattr(instance, name)
                if value is not None:
                    props.append({name: value})
            ret.append(props)

        return ret
|
||||
|
||||
|
||||
def system_info():
    """Collect the full WMI hardware/OS inventory as a nested dict."""
    detail = SystemDetail()
    categories = {
        "comp_sys_prod": detail.comp_sys_prod,
        "comp_sys": detail.comp_sys,
        "mem": detail.memory,
        "os": detail.os,
        "base_board": detail.base_board,
        "bios": detail.bios,
        "disk": detail.disk,
        "network_adapter": detail.network_adapter,
        "network_config": detail.network_config,
        "desktop_monitor": detail.desktop_monitor,
        "cpu": detail.cpu,
        "usb": detail.usb,
    }
    return {name: detail.get_all(obj) for name, obj in categories.items()}
|
||||
|
||||
|
||||
def local_sys_info():
    """Ask the agent binary to collect and report system info (background)."""
    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "sysinfo"])
|
||||
|
||||
|
||||
def get_procs():
    """Snapshot running processes with cpu and memory usage.

    cpu_percent needs two samples, so every process is primed first, then
    after a one-second pause the actual values are read.
    """
    # first pass primes psutil's per-process cpu counters
    for proc in psutil.process_iter():
        with proc.oneshot():
            proc.cpu_percent(interval=None)

    # psutil needs time between samples to compute cpu percent
    sleep(1)

    procs = []
    for idx, proc in enumerate(psutil.process_iter(), 1):
        info = {}
        with proc.oneshot():
            # skip the idle process and anything without a name
            if proc.pid == 0 or not proc.name():
                continue

            info["name"] = proc.name()
            # normalize to whole-machine percentage across all cores
            info["cpu_percent"] = proc.cpu_percent(interval=None) / psutil.cpu_count()
            info["memory_percent"] = proc.memory_percent()
            info["pid"] = proc.pid
            info["ppid"] = proc.ppid()
            info["status"] = proc.status()
            info["username"] = proc.username()
            info["id"] = idx

        procs.append(info)

    return procs
|
||||
|
||||
|
||||
def _compress_json(j):
|
||||
return {
|
||||
"wineventlog": base64.b64encode(
|
||||
zlib.compress(json.dumps(j).encode("utf-8", errors="ignore"))
|
||||
).decode("ascii", errors="ignore")
|
||||
}
|
||||
|
||||
|
||||
def get_eventlog(logtype, last_n_days):
    """Read a windows event log back *last_n_days* days.

    Args:
        logtype: event log name passed to OpenEventLog (e.g. "Application",
            "System", "Security").
        last_n_days: how many days back to read.

    Returns:
        dict from _compress_json: the collected entries as zlib-compressed,
        base64-encoded JSON under the "wineventlog" key.
    """

    start_time = datetime.datetime.now() - datetime.timedelta(days=last_n_days)
    # read newest entries first, in sequential record order
    flags = win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ

    # map win32 event-type constants to display labels; 0 is mapped to
    # INFO as well (presumably seen from some sources — TODO confirm)
    status_dict = {
        win32con.EVENTLOG_AUDIT_FAILURE: "AUDIT_FAILURE",
        win32con.EVENTLOG_AUDIT_SUCCESS: "AUDIT_SUCCESS",
        win32con.EVENTLOG_INFORMATION_TYPE: "INFO",
        win32con.EVENTLOG_WARNING_TYPE: "WARNING",
        win32con.EVENTLOG_ERROR_TYPE: "ERROR",
        0: "INFO",
    }

    computer = "localhost"
    hand = win32evtlog.OpenEventLog(computer, logtype)
    total = win32evtlog.GetNumberOfEventLogRecords(hand)
    log = []
    uid = 0
    done = False

    try:
        while 1:
            events = win32evtlog.ReadEventLog(hand, flags, 0)
            for ev_obj in events:

                uid += 1
                # return once total number of events reach or we'll be stuck in an infinite loop
                if uid >= total:
                    done = True
                    break

                # stop once we've read past the requested time window
                the_time = ev_obj.TimeGenerated.Format()
                time_obj = datetime.datetime.strptime(the_time, "%c")
                if time_obj < start_time:
                    done = True
                    break

                computer = str(ev_obj.ComputerName)
                src = str(ev_obj.SourceName)
                evt_type = str(status_dict[ev_obj.EventType])
                # HRESULT_CODE keeps only the low 16 bits of EventID, the
                # numeric id shown in event viewer
                evt_id = str(winerror.HRESULT_CODE(ev_obj.EventID))
                evt_category = str(ev_obj.EventCategory)
                record = str(ev_obj.RecordNumber)
                # angle brackets are stripped from the message (presumably
                # to keep the dashboard markup safe — TODO confirm)
                msg = (
                    str(win32evtlogutil.SafeFormatMessage(ev_obj, logtype))
                    .replace("<", "")
                    .replace(">", "")
                )

                event_dict = {
                    "computer": computer,
                    "source": src,
                    "eventType": evt_type,
                    "eventID": evt_id,
                    "eventCategory": evt_category,
                    "message": msg,
                    "time": the_time,
                    "record": record,
                    "uid": uid,
                }

                log.append(event_dict)

            if done:
                break

    except Exception:
        # best-effort: return whatever was collected before the failure
        pass

    win32evtlog.CloseEventLog(hand)
    return _compress_json(log)
|
||||
@@ -1,3 +0,0 @@
|
||||
### tacticalrmm ansible WIP
|
||||
|
||||
ansible role to setup a Debian 11 VM for tacticalrmm local development
|
||||
@@ -1,40 +0,0 @@
|
||||
---
|
||||
user: "tactical"
|
||||
python_ver: "3.11.8"
|
||||
go_ver: "1.20.7"
|
||||
backend_repo: "https://github.com/amidaware/tacticalrmm.git"
|
||||
frontend_repo: "https://github.com/amidaware/tacticalrmm-web.git"
|
||||
scripts_repo: "https://github.com/amidaware/community-scripts.git"
|
||||
backend_dir: "/opt/trmm"
|
||||
frontend_dir: "/opt/trmm-web"
|
||||
scripts_dir: "/opt/trmm-community-scripts"
|
||||
trmm_dir: "{{ backend_dir }}/api/tacticalrmm/tacticalrmm"
|
||||
mesh_dir: "/opt/meshcentral"
|
||||
settings_file: "{{ trmm_dir }}/settings.py"
|
||||
local_settings_file: "{{ trmm_dir }}/local_settings.py"
|
||||
fullchain_dest: /etc/ssl/certs/fullchain.pem
|
||||
privkey_dest: /etc/ssl/certs/privkey.pem
|
||||
|
||||
base_pkgs:
|
||||
- build-essential
|
||||
- curl
|
||||
- wget
|
||||
- dirmngr
|
||||
- gnupg
|
||||
- openssl
|
||||
- gcc
|
||||
- g++
|
||||
- make
|
||||
- ca-certificates
|
||||
- git
|
||||
|
||||
python_pkgs:
|
||||
- zlib1g-dev
|
||||
- libncurses5-dev
|
||||
- libgdbm-dev
|
||||
- libnss3-dev
|
||||
- libssl-dev
|
||||
- libreadline-dev
|
||||
- libffi-dev
|
||||
- libsqlite3-dev
|
||||
- libbz2-dev
|
||||
@@ -1,31 +0,0 @@
|
||||
worker_rlimit_nofile 1000000;
|
||||
user www-data;
|
||||
worker_processes auto;
|
||||
pid /run/nginx.pid;
|
||||
include /etc/nginx/modules-enabled/*.conf;
|
||||
|
||||
events {
|
||||
worker_connections 4096;
|
||||
}
|
||||
|
||||
http {
|
||||
sendfile on;
|
||||
server_tokens off;
|
||||
tcp_nopush on;
|
||||
types_hash_max_size 2048;
|
||||
server_names_hash_bucket_size 256;
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
ssl_protocols TLSv1.2 TLSv1.3;
|
||||
ssl_prefer_server_ciphers on;
|
||||
ssl_ciphers EECDH+AESGCM:EDH+AESGCM;
|
||||
ssl_ecdh_curve secp384r1;
|
||||
ssl_stapling on;
|
||||
ssl_stapling_verify on;
|
||||
add_header X-Content-Type-Options nosniff;
|
||||
access_log /var/log/nginx/access.log;
|
||||
error_log /var/log/nginx/error.log;
|
||||
gzip on;
|
||||
include /etc/nginx/conf.d/*.conf;
|
||||
include /etc/nginx/sites-enabled/*;
|
||||
}
|
||||
@@ -1,20 +0,0 @@
|
||||
" This file loads the default vim options at the beginning and prevents
|
||||
" that they are being loaded again later. All other options that will be set,
|
||||
" are added, or overwrite the default settings. Add as many options as you
|
||||
" wish at the end of this file.
|
||||
|
||||
" Load the defaults
|
||||
source $VIMRUNTIME/defaults.vim
|
||||
|
||||
" Prevent the defaults from being loaded again later, if the user doesn't
|
||||
" have a local vimrc (~/.vimrc)
|
||||
let skip_defaults_vim = 1
|
||||
|
||||
|
||||
" Set more options (overwrites settings from /usr/share/vim/vim80/defaults.vim)
|
||||
" Add as many options as you wish
|
||||
|
||||
" Set the mouse mode to 'r'
|
||||
if has('mouse')
|
||||
set mouse=r
|
||||
endif
|
||||
@@ -1,634 +0,0 @@
|
||||
---
|
||||
- name: Append subdomains to hosts
|
||||
tags: hosts
|
||||
become: yes
|
||||
ansible.builtin.lineinfile:
|
||||
path: /etc/hosts
|
||||
backrefs: yes
|
||||
regexp: '^(127\.0\.1\.1 .*)$'
|
||||
line: "\\1 {{ api }} {{ mesh }} {{ rmm }}"
|
||||
|
||||
- name: set mouse mode for vim
|
||||
tags: vim
|
||||
become: yes
|
||||
ansible.builtin.copy:
|
||||
src: vimrc.local
|
||||
dest: /etc/vim/vimrc.local
|
||||
owner: "root"
|
||||
group: "root"
|
||||
mode: "0644"
|
||||
|
||||
- name: set max_user_watches
|
||||
tags: sysctl
|
||||
become: yes
|
||||
ansible.builtin.lineinfile:
|
||||
path: /etc/sysctl.conf
|
||||
line: fs.inotify.max_user_watches=524288
|
||||
|
||||
- name: reload sysctl
|
||||
tags: sysctl
|
||||
become: yes
|
||||
ansible.builtin.command:
|
||||
cmd: sysctl -p
|
||||
|
||||
- name: install base packages
|
||||
tags: base
|
||||
become: yes
|
||||
ansible.builtin.apt:
|
||||
pkg: "{{ item }}"
|
||||
state: present
|
||||
update_cache: yes
|
||||
with_items:
|
||||
- "{{ base_pkgs }}"
|
||||
|
||||
- name: set arch fact
|
||||
ansible.builtin.set_fact:
|
||||
goarch: "{{ 'amd64' if ansible_architecture == 'x86_64' else 'arm64' }}"
|
||||
|
||||
- name: download and install golang
|
||||
tags: golang
|
||||
become: yes
|
||||
ansible.builtin.unarchive:
|
||||
src: "https://go.dev/dl/go{{ go_ver }}.linux-{{ goarch }}.tar.gz"
|
||||
dest: /usr/local
|
||||
remote_src: yes
|
||||
|
||||
- name: add golang to path
|
||||
become: yes
|
||||
tags: golang
|
||||
ansible.builtin.copy:
|
||||
dest: /etc/profile.d/golang.sh
|
||||
content: "PATH=$PATH:/usr/local/go/bin"
|
||||
|
||||
- name: install python prereqs
|
||||
tags: python
|
||||
become: yes
|
||||
ansible.builtin.apt:
|
||||
pkg: "{{ item }}"
|
||||
state: present
|
||||
with_items:
|
||||
- "{{ python_pkgs }}"
|
||||
|
||||
- name: get cpu core count
|
||||
tags: python
|
||||
ansible.builtin.command: nproc
|
||||
register: numprocs
|
||||
|
||||
- name: Create python tmpdir
|
||||
tags: python
|
||||
ansible.builtin.tempfile:
|
||||
state: directory
|
||||
suffix: python
|
||||
register: python_tmp
|
||||
|
||||
- name: download and extract python
|
||||
tags: python
|
||||
ansible.builtin.unarchive:
|
||||
src: "https://www.python.org/ftp/python/{{ python_ver }}/Python-{{ python_ver }}.tgz"
|
||||
dest: "{{ python_tmp.path }}"
|
||||
remote_src: yes
|
||||
|
||||
- name: compile python
|
||||
tags: python
|
||||
ansible.builtin.shell:
|
||||
chdir: "{{ python_tmp.path }}/Python-{{ python_ver }}"
|
||||
cmd: |
|
||||
./configure --enable-optimizations
|
||||
make -j {{ numprocs.stdout }}
|
||||
|
||||
- name: alt install python
|
||||
tags: python
|
||||
become: yes
|
||||
ansible.builtin.shell:
|
||||
chdir: "{{ python_tmp.path }}/Python-{{ python_ver }}"
|
||||
cmd: |
|
||||
make altinstall
|
||||
|
||||
- name: install redis
|
||||
tags: redis
|
||||
become: yes
|
||||
ansible.builtin.apt:
|
||||
pkg: redis
|
||||
state: present
|
||||
|
||||
- name: create postgres repo
|
||||
tags: postgres
|
||||
become: yes
|
||||
ansible.builtin.copy:
|
||||
content: "deb http://apt.postgresql.org/pub/repos/apt {{ ansible_distribution_release }}-pgdg main"
|
||||
dest: /etc/apt/sources.list.d/pgdg.list
|
||||
owner: root
|
||||
group: root
|
||||
mode: "0644"
|
||||
|
||||
- name: import postgres repo signing key
|
||||
tags: postgres
|
||||
become: yes
|
||||
ansible.builtin.apt_key:
|
||||
url: https://www.postgresql.org/media/keys/ACCC4CF8.asc
|
||||
state: present
|
||||
|
||||
- name: install postgresql
|
||||
tags: postgres
|
||||
become: yes
|
||||
ansible.builtin.apt:
|
||||
pkg: postgresql-15
|
||||
state: present
|
||||
update_cache: yes
|
||||
|
||||
- name: ensure postgres enabled and started
|
||||
tags: postgres
|
||||
become: yes
|
||||
ansible.builtin.service:
|
||||
name: postgresql
|
||||
enabled: yes
|
||||
state: started
|
||||
|
||||
- name: setup trmm database
|
||||
tags: postgres
|
||||
become: yes
|
||||
become_user: postgres
|
||||
ansible.builtin.shell:
|
||||
cmd: |
|
||||
psql -c "CREATE DATABASE tacticalrmm"
|
||||
psql -c "CREATE USER {{ db_user }} WITH PASSWORD '{{ db_passwd }}'"
|
||||
psql -c "ALTER ROLE {{ db_user }} SET client_encoding TO 'utf8'"
|
||||
psql -c "ALTER ROLE {{ db_user }} SET default_transaction_isolation TO 'read committed'"
|
||||
psql -c "ALTER ROLE {{ db_user }} SET timezone TO 'UTC'"
|
||||
psql -c "ALTER ROLE {{ db_user }} CREATEDB"
|
||||
psql -c "GRANT ALL PRIVILEGES ON DATABASE tacticalrmm TO {{ db_user }}"
|
||||
psql -c "ALTER DATABASE tacticalrmm OWNER TO {{ db_user }}"
|
||||
psql -c "GRANT USAGE, CREATE ON SCHEMA PUBLIC TO {{ db_user }}"
|
||||
|
||||
- name: setup mesh database
|
||||
tags: postgres
|
||||
become: yes
|
||||
become_user: postgres
|
||||
ansible.builtin.shell:
|
||||
cmd: |
|
||||
psql -c "CREATE DATABASE meshcentral"
|
||||
psql -c "CREATE USER {{ mesh_db_user }} WITH PASSWORD '{{ mesh_db_passwd }}'"
|
||||
psql -c "ALTER ROLE {{ mesh_db_user }} SET client_encoding TO 'utf8'"
|
||||
psql -c "ALTER ROLE {{ mesh_db_user }} SET default_transaction_isolation TO 'read committed'"
|
||||
psql -c "ALTER ROLE {{ mesh_db_user }} SET timezone TO 'UTC'"
|
||||
psql -c "GRANT ALL PRIVILEGES ON DATABASE meshcentral TO {{ mesh_db_user }}"
|
||||
psql -c "ALTER DATABASE meshcentral OWNER TO {{ mesh_db_user }}"
|
||||
psql -c "GRANT USAGE, CREATE ON SCHEMA PUBLIC TO {{ mesh_db_user }}"
|
||||
|
||||
- name: create repo dirs
|
||||
become: yes
|
||||
tags: git
|
||||
ansible.builtin.file:
|
||||
path: "{{ item }}"
|
||||
state: directory
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
mode: "0755"
|
||||
with_items:
|
||||
- "{{ backend_dir }}"
|
||||
- "{{ frontend_dir }}"
|
||||
- "{{ scripts_dir }}"
|
||||
|
||||
- name: git clone repos
|
||||
tags: git
|
||||
ansible.builtin.git:
|
||||
repo: "{{ item.repo }}"
|
||||
dest: "{{ item.dest }}"
|
||||
version: "{{ item.version }}"
|
||||
with_items:
|
||||
- {
|
||||
repo: "{{ backend_repo }}",
|
||||
dest: "{{ backend_dir }}",
|
||||
version: develop,
|
||||
}
|
||||
- {
|
||||
repo: "{{ frontend_repo }}",
|
||||
dest: "{{ frontend_dir }}",
|
||||
version: develop,
|
||||
}
|
||||
- { repo: "{{ scripts_repo }}", dest: "{{ scripts_dir }}", version: main }
|
||||
|
||||
- name: get nats_server_ver
|
||||
tags: nats
|
||||
ansible.builtin.shell: grep "^NATS_SERVER_VER" {{ settings_file }} | awk -F'[= "]' '{print $5}'
|
||||
register: nats_server_ver
|
||||
|
||||
- name: Create nats tmpdir
|
||||
tags: nats
|
||||
ansible.builtin.tempfile:
|
||||
state: directory
|
||||
suffix: nats
|
||||
register: nats_tmp
|
||||
|
||||
- name: download and extract nats
|
||||
tags: nats
|
||||
ansible.builtin.unarchive:
|
||||
src: "https://github.com/nats-io/nats-server/releases/download/v{{ nats_server_ver.stdout }}/nats-server-v{{ nats_server_ver.stdout }}-linux-{{ goarch }}.tar.gz"
|
||||
dest: "{{ nats_tmp.path }}"
|
||||
remote_src: yes
|
||||
|
||||
- name: install nats
|
||||
tags: nats
|
||||
become: yes
|
||||
ansible.builtin.copy:
|
||||
remote_src: yes
|
||||
src: "{{ nats_tmp.path }}/nats-server-v{{ nats_server_ver.stdout }}-linux-{{ goarch }}/nats-server"
|
||||
dest: /usr/local/bin/nats-server
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
mode: "0755"
|
||||
|
||||
- name: Create nodejs tmpdir
|
||||
tags: nodejs
|
||||
ansible.builtin.tempfile:
|
||||
state: directory
|
||||
suffix: nodejs
|
||||
register: nodejs_tmp
|
||||
|
||||
- name: download nodejs setup
|
||||
tags: nodejs
|
||||
ansible.builtin.get_url:
|
||||
url: https://deb.nodesource.com/setup_18.x
|
||||
dest: "{{ nodejs_tmp.path }}/setup_node.sh"
|
||||
mode: "0755"
|
||||
|
||||
- name: run node setup script
|
||||
tags: nodejs
|
||||
become: yes
|
||||
ansible.builtin.command:
|
||||
cmd: "{{ nodejs_tmp.path }}/setup_node.sh"
|
||||
|
||||
- name: install nodejs
|
||||
tags: nodejs
|
||||
become: yes
|
||||
ansible.builtin.apt:
|
||||
pkg: nodejs
|
||||
state: present
|
||||
update_cache: yes
|
||||
|
||||
- name: update npm
|
||||
tags: nodejs
|
||||
become: yes
|
||||
ansible.builtin.shell:
|
||||
cmd: npm install -g npm
|
||||
|
||||
- name: install quasar cli
|
||||
tags: quasar
|
||||
become: yes
|
||||
ansible.builtin.shell:
|
||||
cmd: npm install -g @quasar/cli
|
||||
|
||||
- name: install frontend
|
||||
tags: quasar
|
||||
ansible.builtin.shell:
|
||||
chdir: "{{ frontend_dir }}"
|
||||
cmd: npm install
|
||||
|
||||
- name: add quasar env
|
||||
tags: quasar
|
||||
ansible.builtin.template:
|
||||
src: quasar.env.j2
|
||||
dest: "{{ frontend_dir }}/.env"
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
mode: "0644"
|
||||
|
||||
- name: remove tempdirs
|
||||
tags: cleanup
|
||||
become: yes
|
||||
ignore_errors: yes
|
||||
ansible.builtin.file:
|
||||
path: "{{ item }}"
|
||||
state: absent
|
||||
with_items:
|
||||
- "{{ nats_tmp.path }}"
|
||||
- "{{ python_tmp.path }}"
|
||||
- "{{ nodejs_tmp.path }}"
|
||||
|
||||
- name: deploy fullchain
|
||||
tags: certs
|
||||
become: yes
|
||||
ansible.builtin.copy:
|
||||
src: "{{ fullchain_src }}"
|
||||
dest: "{{ fullchain_dest }}"
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
mode: "0440"
|
||||
|
||||
- name: deploy privkey
|
||||
tags: certs
|
||||
become: yes
|
||||
ansible.builtin.copy:
|
||||
src: "{{ privkey_src }}"
|
||||
dest: "{{ privkey_dest }}"
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
mode: "0440"
|
||||
|
||||
- name: import nginx signing key
|
||||
tags: nginx
|
||||
become: yes
|
||||
ansible.builtin.apt_key:
|
||||
url: https://nginx.org/keys/nginx_signing.key
|
||||
state: present
|
||||
|
||||
- name: add nginx repo
|
||||
tags: nginx
|
||||
become: yes
|
||||
ansible.builtin.template:
|
||||
src: nginx.repo.j2
|
||||
dest: /etc/apt/sources.list.d/nginx.list
|
||||
owner: "root"
|
||||
group: "root"
|
||||
mode: "0644"
|
||||
|
||||
- name: install nginx
|
||||
tags: nginx
|
||||
become: yes
|
||||
ansible.builtin.apt:
|
||||
pkg: nginx
|
||||
state: present
|
||||
update_cache: yes
|
||||
|
||||
- name: set nginx default conf
|
||||
tags: nginx
|
||||
become: yes
|
||||
ansible.builtin.copy:
|
||||
src: nginx-default.conf
|
||||
dest: /etc/nginx/nginx.conf
|
||||
owner: "root"
|
||||
group: "root"
|
||||
mode: "0644"
|
||||
|
||||
- name: create nginx dirs
|
||||
become: yes
|
||||
tags: nginx
|
||||
ansible.builtin.file:
|
||||
state: directory
|
||||
path: "{{ item }}"
|
||||
mode: "0755"
|
||||
with_items:
|
||||
- /etc/nginx/sites-available
|
||||
- /etc/nginx/sites-enabled
|
||||
|
||||
- name: deploy nginx sites
|
||||
become: yes
|
||||
tags: nginx
|
||||
ansible.builtin.template:
|
||||
src: "{{ item.src }}"
|
||||
dest: "{{ item.dest }}"
|
||||
mode: "0644"
|
||||
owner: root
|
||||
group: root
|
||||
with_items:
|
||||
- { src: backend.nginx.j2, dest: /etc/nginx/sites-available/backend.conf }
|
||||
- { src: mesh.nginx.j2, dest: /etc/nginx/sites-available/mesh.conf }
|
||||
|
||||
- name: enable nginx sites
|
||||
become: yes
|
||||
tags: nginx
|
||||
ansible.builtin.file:
|
||||
src: "{{ item.src }}"
|
||||
dest: "{{ item.dest }}"
|
||||
mode: "0644"
|
||||
owner: root
|
||||
group: root
|
||||
state: link
|
||||
with_items:
|
||||
- {
|
||||
src: /etc/nginx/sites-available/backend.conf,
|
||||
dest: /etc/nginx/sites-enabled/backend.conf,
|
||||
}
|
||||
- {
|
||||
src: /etc/nginx/sites-available/mesh.conf,
|
||||
dest: /etc/nginx/sites-enabled/mesh.conf,
|
||||
}
|
||||
|
||||
- name: ensure nginx enabled and restarted
|
||||
tags: nginx
|
||||
become: yes
|
||||
ansible.builtin.service:
|
||||
name: nginx
|
||||
enabled: yes
|
||||
state: restarted
|
||||
|
||||
- name: set natsapi fact
|
||||
ansible.builtin.set_fact:
|
||||
natsapi: "{{ 'nats-api' if ansible_architecture == 'x86_64' else 'nats-api-arm64' }}"
|
||||
|
||||
- name: copy nats-api bin
|
||||
tags: nats-api
|
||||
become: yes
|
||||
ansible.builtin.copy:
|
||||
remote_src: yes
|
||||
src: "{{ backend_dir }}/natsapi/bin/{{ natsapi }}"
|
||||
dest: /usr/local/bin/nats-api
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
mode: "0755"
|
||||
|
||||
- name: get setuptools_ver
|
||||
tags: pip
|
||||
ansible.builtin.shell: grep "^SETUPTOOLS_VER" {{ settings_file }} | awk -F'[= "]' '{print $5}'
|
||||
register: setuptools_ver
|
||||
|
||||
- name: get wheel_ver
|
||||
tags: pip
|
||||
ansible.builtin.shell: grep "^WHEEL_VER" {{ settings_file }} | awk -F'[= "]' '{print $5}'
|
||||
register: wheel_ver
|
||||
|
||||
- name: setup virtual env
|
||||
tags: pip
|
||||
ansible.builtin.shell:
|
||||
chdir: "{{ backend_dir }}/api"
|
||||
cmd: python3.11 -m venv env
|
||||
|
||||
- name: update pip to latest
|
||||
tags: pip
|
||||
ansible.builtin.pip:
|
||||
virtualenv: "{{ backend_dir }}/api/env"
|
||||
name: pip
|
||||
state: latest
|
||||
|
||||
- name: install setuptools and wheel
|
||||
tags: pip
|
||||
ansible.builtin.pip:
|
||||
virtualenv: "{{ backend_dir }}/api/env"
|
||||
name: "{{ item }}"
|
||||
with_items:
|
||||
- "setuptools=={{ setuptools_ver.stdout }}"
|
||||
- "wheel=={{ wheel_ver.stdout }}"
|
||||
|
||||
- name: install python packages
|
||||
tags: pip
|
||||
ansible.builtin.pip:
|
||||
virtualenv: "{{ backend_dir }}/api/env"
|
||||
chdir: "{{ backend_dir }}/api/tacticalrmm"
|
||||
requirements: "{{ item }}"
|
||||
with_items:
|
||||
- requirements.txt
|
||||
- requirements-dev.txt
|
||||
- requirements-test.txt
|
||||
|
||||
- name: deploy django local settings
|
||||
tags: django
|
||||
ansible.builtin.template:
|
||||
src: local_settings.j2
|
||||
dest: "{{ local_settings_file }}"
|
||||
mode: "0644"
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
|
||||
- name: setup django
|
||||
tags: django
|
||||
ansible.builtin.shell:
|
||||
chdir: "{{ backend_dir }}/api/tacticalrmm"
|
||||
cmd: |
|
||||
. ../env/bin/activate
|
||||
python manage.py migrate --no-input
|
||||
python manage.py collectstatic --no-input
|
||||
python manage.py create_natsapi_conf
|
||||
python manage.py load_chocos
|
||||
python manage.py load_community_scripts
|
||||
echo "from accounts.models import User; User.objects.create_superuser('{{ django_user }}', '{{ github_email }}', '{{ django_password }}') if not User.objects.filter(username='{{ django_user }}').exists() else 0;" | python manage.py shell
|
||||
python manage.py create_installer_user
|
||||
|
||||
- name: deploy services
|
||||
tags: services
|
||||
become: yes
|
||||
ansible.builtin.template:
|
||||
src: "{{ item.src }}"
|
||||
dest: "{{ item.dest }}"
|
||||
mode: "0644"
|
||||
owner: "root"
|
||||
group: "root"
|
||||
with_items:
|
||||
- { src: nats-api.systemd.j2, dest: /etc/systemd/system/nats-api.service }
|
||||
- { src: nats-server.systemd.j2, dest: /etc/systemd/system/nats.service }
|
||||
- { src: mesh.systemd.j2, dest: /etc/systemd/system/meshcentral.service }
|
||||
|
||||
- name: get mesh_ver
|
||||
tags: mesh
|
||||
ansible.builtin.shell: grep "^MESH_VER" {{ settings_file }} | awk -F'[= "]' '{print $5}'
|
||||
register: mesh_ver
|
||||
|
||||
- name: create meshcentral data directory
|
||||
tags: mesh
|
||||
become: yes
|
||||
ansible.builtin.file:
|
||||
path: "{{ mesh_dir }}/meshcentral-data"
|
||||
state: directory
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
mode: "0755"
|
||||
|
||||
- name: install meshcentral
|
||||
tags: mesh
|
||||
ansible.builtin.command:
|
||||
chdir: "{{ mesh_dir }}"
|
||||
cmd: "npm install meshcentral@{{ mesh_ver.stdout }}"
|
||||
|
||||
- name: deploy mesh config
|
||||
tags: mesh
|
||||
ansible.builtin.template:
|
||||
src: mesh.cfg.j2
|
||||
dest: "{{ mesh_dir }}/meshcentral-data/config.json"
|
||||
mode: "0644"
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
|
||||
- name: start meshcentral
|
||||
tags: mesh
|
||||
become: yes
|
||||
ansible.builtin.systemd:
|
||||
name: meshcentral.service
|
||||
state: started
|
||||
enabled: yes
|
||||
daemon_reload: yes
|
||||
|
||||
- name: wait for meshcentral to be ready
|
||||
tags: mesh
|
||||
uri:
|
||||
url: "https://{{ mesh }}"
|
||||
return_content: yes
|
||||
validate_certs: yes
|
||||
status_code: 200
|
||||
register: mesh_status
|
||||
until: mesh_status.status == 200
|
||||
retries: 20
|
||||
delay: 3
|
||||
|
||||
- name: get meshcentral login token key
|
||||
tags: mesh_key
|
||||
ansible.builtin.command:
|
||||
chdir: "{{ mesh_dir }}"
|
||||
cmd: node node_modules/meshcentral --logintokenkey
|
||||
register: mesh_token_key
|
||||
|
||||
- name: add mesh key to django settings file
|
||||
tags: mesh_key
|
||||
ansible.builtin.lineinfile:
|
||||
path: "{{ local_settings_file }}"
|
||||
line: 'MESH_TOKEN_KEY = "{{ mesh_token_key.stdout }}"'
|
||||
|
||||
- name: stop meshcentral service
|
||||
tags: mesh_user
|
||||
become: yes
|
||||
ansible.builtin.service:
|
||||
name: meshcentral.service
|
||||
state: stopped
|
||||
|
||||
- name: create mesh user
|
||||
tags: mesh_user
|
||||
ansible.builtin.shell:
|
||||
chdir: "{{ mesh_dir }}"
|
||||
cmd: |
|
||||
node node_modules/meshcentral --createaccount {{ mesh_user }} --pass {{ mesh_password }} --email {{ github_email }}
|
||||
node node_modules/meshcentral --adminaccount {{ mesh_user }}
|
||||
|
||||
- name: start meshcentral service
|
||||
tags: mesh_user
|
||||
become: yes
|
||||
ansible.builtin.service:
|
||||
name: meshcentral.service
|
||||
state: started
|
||||
|
||||
- name: wait for meshcentral to be ready
|
||||
tags: mesh_user
|
||||
uri:
|
||||
url: "https://{{ mesh }}"
|
||||
return_content: yes
|
||||
validate_certs: yes
|
||||
status_code: 200
|
||||
register: mesh_status
|
||||
until: mesh_status.status == 200
|
||||
retries: 20
|
||||
delay: 3
|
||||
|
||||
- name: create mesh device group
|
||||
tags: mesh_user
|
||||
ansible.builtin.shell:
|
||||
chdir: "{{ mesh_dir }}"
|
||||
cmd: |
|
||||
node node_modules/meshcentral/meshctrl.js --url wss://{{ mesh }}:443 --loginuser {{ mesh_user }} --loginpass {{ mesh_password }} AddDeviceGroup --name TacticalRMM
|
||||
|
||||
- name: finish up django
|
||||
tags: mesh_user
|
||||
ansible.builtin.shell:
|
||||
chdir: "{{ backend_dir }}/api/tacticalrmm"
|
||||
cmd: |
|
||||
. ../env/bin/activate
|
||||
python manage.py initial_db_setup
|
||||
python manage.py reload_nats
|
||||
|
||||
- name: restart services
|
||||
tags: services
|
||||
become: yes
|
||||
ansible.builtin.systemd:
|
||||
daemon_reload: yes
|
||||
enabled: yes
|
||||
state: restarted
|
||||
name: "{{ item }}.service"
|
||||
with_items:
|
||||
- nats
|
||||
- nats-api
|
||||
@@ -1,20 +0,0 @@
|
||||
server {
|
||||
listen 443 ssl reuseport;
|
||||
listen [::]:443 ssl;
|
||||
server_name {{ api }};
|
||||
client_max_body_size 300M;
|
||||
ssl_certificate {{ fullchain_dest }};
|
||||
ssl_certificate_key {{ privkey_dest }};
|
||||
|
||||
|
||||
location ~ ^/natsws {
|
||||
proxy_pass http://127.0.0.1:9235;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header X-Forwarded-Host $host:$server_port;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
}
|
||||
@@ -1,20 +0,0 @@
|
||||
SECRET_KEY = "{{ django_secret }}"
|
||||
DEBUG = True
|
||||
ALLOWED_HOSTS = ['{{ api }}']
|
||||
ADMIN_URL = "admin/"
|
||||
CORS_ORIGIN_ALLOW_ALL = True
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.postgresql',
|
||||
'NAME': 'tacticalrmm',
|
||||
'USER': '{{ db_user }}',
|
||||
'PASSWORD': '{{ db_passwd }}',
|
||||
'HOST': 'localhost',
|
||||
'PORT': '5432',
|
||||
}
|
||||
}
|
||||
ADMIN_ENABLED = True
|
||||
CERT_FILE = "{{ fullchain_dest }}"
|
||||
KEY_FILE = "{{ privkey_dest }}"
|
||||
MESH_USERNAME = "{{ mesh_user }}"
|
||||
MESH_SITE = "https://{{ mesh }}"
|
||||
@@ -1,37 +0,0 @@
|
||||
{
|
||||
"settings": {
|
||||
"Cert": "{{ mesh }}",
|
||||
"WANonly": true,
|
||||
"Minify": 1,
|
||||
"Port": 4430,
|
||||
"AliasPort": 443,
|
||||
"RedirPort": 800,
|
||||
"AllowLoginToken": true,
|
||||
"AllowFraming": true,
|
||||
"AgentPing": 35,
|
||||
"AllowHighQualityDesktop": true,
|
||||
"TlsOffload": "127.0.0.1",
|
||||
"agentCoreDump": false,
|
||||
"Compression": true,
|
||||
"WsCompression": true,
|
||||
"AgentWsCompression": true,
|
||||
"MaxInvalidLogin": { "time": 5, "count": 5, "coolofftime": 30 },
|
||||
"postgres": {
|
||||
"user": "{{ mesh_db_user }}",
|
||||
"password": "{{ mesh_db_passwd }}",
|
||||
"port": "5432",
|
||||
"host": "localhost"
|
||||
}
|
||||
},
|
||||
"domains": {
|
||||
"": {
|
||||
"Title": "Tactical RMM Dev",
|
||||
"Title2": "Tactical RMM Dev",
|
||||
"NewAccounts": false,
|
||||
"CertUrl": "https://{{ mesh }}:443/",
|
||||
"GeoLocation": true,
|
||||
"CookieIpCheck": false,
|
||||
"mstsc": true
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
server {
|
||||
listen 443 ssl;
|
||||
listen [::]:443 ssl;
|
||||
proxy_send_timeout 330s;
|
||||
proxy_read_timeout 330s;
|
||||
server_name {{ mesh }};
|
||||
ssl_certificate {{ fullchain_dest }};
|
||||
ssl_certificate_key {{ privkey_dest }};
|
||||
|
||||
ssl_session_cache shared:WEBSSL:10m;
|
||||
|
||||
location / {
|
||||
proxy_pass http://127.0.0.1:4430/;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header X-Forwarded-Host $host:$server_port;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
[Unit]
|
||||
Description=MeshCentral Server
|
||||
After=network.target postgresql.service nginx.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
LimitNOFILE=1000000
|
||||
ExecStart=/usr/bin/node node_modules/meshcentral
|
||||
Environment=NODE_ENV=production
|
||||
WorkingDirectory={{ mesh_dir }}
|
||||
User={{ user }}
|
||||
Group={{ user }}
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
@@ -1,14 +0,0 @@
|
||||
[Unit]
|
||||
Description=TacticalRMM Nats Api
|
||||
After=nats.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
ExecStart=/usr/local/bin/nats-api -config {{ backend_dir }}/api/tacticalrmm/nats-api.conf
|
||||
User={{ user }}
|
||||
Group={{ user }}
|
||||
Restart=always
|
||||
RestartSec=5s
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
@@ -1,18 +0,0 @@
|
||||
[Unit]
|
||||
Description=NATS Server
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
PrivateTmp=true
|
||||
Type=simple
|
||||
ExecStart=/usr/local/bin/nats-server -c {{ backend_dir }}/api/tacticalrmm/nats-rmm.conf
|
||||
ExecReload=/usr/bin/kill -s HUP $MAINPID
|
||||
ExecStop=/usr/bin/kill -s SIGINT $MAINPID
|
||||
User={{ user }}
|
||||
Group={{ user }}
|
||||
Restart=always
|
||||
RestartSec=5s
|
||||
LimitNOFILE=1000000
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
@@ -1,2 +0,0 @@
|
||||
deb https://nginx.org/packages/debian/ {{ ansible_distribution_release }} nginx
|
||||
deb-src https://nginx.org/packages/debian/ {{ ansible_distribution_release }} nginx
|
||||
@@ -1,4 +0,0 @@
|
||||
DEV_URL = "http://{{ api }}:8000"
|
||||
DEV_HOST = "0.0.0.0"
|
||||
DEV_PORT = "8080"
|
||||
USE_HTTPS = false
|
||||
@@ -1,22 +0,0 @@
|
||||
---
|
||||
- hosts: "{{ target }}"
|
||||
vars:
|
||||
ansible_user: tactical
|
||||
fullchain_src: /path/to/fullchain.pem
|
||||
privkey_src: /path/to/privkey.pem
|
||||
api: "api.example.com"
|
||||
rmm: "rmm.example.com"
|
||||
mesh: "mesh.example.com"
|
||||
github_username: "changeme"
|
||||
github_email: "changeme@example.com"
|
||||
mesh_user: "changeme"
|
||||
mesh_password: "changeme"
|
||||
db_user: "changeme"
|
||||
db_passwd: "changeme"
|
||||
mesh_db_user: "changeme"
|
||||
mesh_db_passwd: "changeme"
|
||||
django_secret: "changeme"
|
||||
django_user: "changeme"
|
||||
django_password: "changeme"
|
||||
roles:
|
||||
- trmm_dev
|
||||
@@ -1,15 +1,25 @@
|
||||
[run]
|
||||
include = *.py
|
||||
omit =
|
||||
tacticalrmm/asgi.py
|
||||
tacticalrmm/wsgi.py
|
||||
manage.py
|
||||
*/__pycache__/*
|
||||
*/env/*
|
||||
*/baker_recipes.py
|
||||
/usr/local/lib/*
|
||||
**/migrations/*
|
||||
**/test*.py
|
||||
|
||||
source = .
|
||||
[report]
|
||||
show_missing = True
|
||||
include = *.py
|
||||
omit =
|
||||
*/__pycache__/*
|
||||
*/env/*
|
||||
*/management/*
|
||||
*/migrations/*
|
||||
*/static/*
|
||||
manage.py
|
||||
*/local_settings.py
|
||||
*/apps.py
|
||||
*/admin.py
|
||||
*/celery.py
|
||||
*/wsgi.py
|
||||
*/settings.py
|
||||
*/baker_recipes.py
|
||||
*/urls.py
|
||||
*/tests.py
|
||||
*/test.py
|
||||
api/*.py
|
||||
checks/utils.py
|
||||
|
||||
|
||||
@@ -1,12 +0,0 @@
|
||||
[flake8]
|
||||
ignore = E501,W503,E722,E203
|
||||
exclude =
|
||||
.mypy*
|
||||
.pytest*
|
||||
.git
|
||||
demo_data.py
|
||||
manage.py
|
||||
*/__pycache__/*
|
||||
*/env/*
|
||||
/usr/local/lib/*
|
||||
**/migrations/*
|
||||
@@ -1,8 +1,8 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from rest_framework.authtoken.admin import TokenAdmin
|
||||
|
||||
from .models import Role, User
|
||||
from .models import User
|
||||
|
||||
admin.site.register(User)
|
||||
TokenAdmin.raw_id_fields = ("user",)
|
||||
admin.site.register(Role)
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
import uuid
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from accounts.models import User
|
||||
from tacticalrmm.helpers import make_random_password
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Creates the installer user"
|
||||
|
||||
def handle(self, *args, **kwargs): # type: ignore
|
||||
self.stdout.write("Checking if installer user has been created...")
|
||||
if User.objects.filter(is_installer_user=True).exists():
|
||||
self.stdout.write("Installer user already exists")
|
||||
return
|
||||
|
||||
User.objects.create_user(
|
||||
username=uuid.uuid4().hex,
|
||||
is_installer_user=True,
|
||||
password=make_random_password(len=60),
|
||||
block_dashboard_login=True,
|
||||
)
|
||||
self.stdout.write("Installer user has been created")
|
||||
@@ -1,12 +1,13 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from knox.models import AuthToken
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Deletes all knox web tokens"
|
||||
|
||||
def handle(self, *args, **kwargs): # type: ignore
|
||||
def handle(self, *args, **kwargs):
|
||||
# only delete web tokens, not any generated by the installer or deployments
|
||||
dont_delete = djangotime.now() + djangotime.timedelta(hours=23)
|
||||
tokens = AuthToken.objects.exclude(deploytokens__isnull=False).filter(
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
import subprocess
|
||||
|
||||
import pyotp
|
||||
import subprocess
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from accounts.models import User
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Generates barcode for Authenticator and creates totp for user"
|
||||
help = "Generates barcode for Google Authenticator and creates totp for user"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument("code", type=str)
|
||||
@@ -26,10 +24,12 @@ class Command(BaseCommand):
|
||||
url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain)
|
||||
subprocess.run(f'qr "{url}"', shell=True)
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Scan the barcode above with your authenticator app")
|
||||
self.style.SUCCESS(
|
||||
"Scan the barcode above with your google authenticator app"
|
||||
)
|
||||
)
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f"If that doesn't work you may manually enter the setup key: {code}"
|
||||
f"If that doesn't work you may manually enter the key: {code}"
|
||||
)
|
||||
)
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
import subprocess
|
||||
|
||||
import pyotp
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from accounts.models import User
|
||||
from tacticalrmm.util_settings import get_webdomain
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Reset 2fa"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument("username", type=str)
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
username = kwargs["username"]
|
||||
try:
|
||||
user = User.objects.get(username=username)
|
||||
except User.DoesNotExist:
|
||||
self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
|
||||
return
|
||||
|
||||
code = pyotp.random_base32()
|
||||
user.totp_key = code
|
||||
user.save(update_fields=["totp_key"])
|
||||
|
||||
url = pyotp.totp.TOTP(code).provisioning_uri(
|
||||
username, issuer_name=get_webdomain(settings.CORS_ORIGIN_WHITELIST[0])
|
||||
)
|
||||
subprocess.run(f'qr "{url}"', shell=True)
|
||||
self.stdout.write(
|
||||
self.style.WARNING("Scan the barcode above with your authenticator app")
|
||||
)
|
||||
self.stdout.write(
|
||||
self.style.WARNING(
|
||||
f"If that doesn't work you may manually enter the setup key: {code}"
|
||||
)
|
||||
)
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(f"2fa was successfully reset for user {username}")
|
||||
)
|
||||
@@ -1,31 +0,0 @@
|
||||
from getpass import getpass
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from accounts.models import User
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Reset password for user"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument("username", type=str)
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
username = kwargs["username"]
|
||||
try:
|
||||
user = User.objects.get(username=username)
|
||||
except User.DoesNotExist:
|
||||
self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
|
||||
return
|
||||
|
||||
pass1, pass2 = "foo", "bar"
|
||||
while pass1 != pass2:
|
||||
pass1 = getpass()
|
||||
pass2 = getpass(prompt="Confirm Password:")
|
||||
if pass1 != pass2:
|
||||
self.stdout.write(self.style.ERROR("Passwords don't match"))
|
||||
|
||||
user.set_password(pass1)
|
||||
user.save()
|
||||
self.stdout.write(self.style.SUCCESS(f"Password for {username} was reset!"))
|
||||
@@ -2,8 +2,8 @@
|
||||
|
||||
import django.contrib.auth.models
|
||||
import django.contrib.auth.validators
|
||||
import django.utils.timezone
|
||||
from django.db import migrations, models
|
||||
import django.utils.timezone
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -6,28 +6,28 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0002_auto_20200810_0544"),
|
||||
('accounts', '0002_auto_20200810_0544'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="created_by",
|
||||
model_name='user',
|
||||
name='created_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="created_time",
|
||||
model_name='user',
|
||||
name='created_time',
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="modified_by",
|
||||
model_name='user',
|
||||
name='modified_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="modified_time",
|
||||
model_name='user',
|
||||
name='modified_time',
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -6,24 +6,24 @@ from django.db import migrations
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0003_auto_20200922_1344"),
|
||||
('accounts', '0003_auto_20200922_1344'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="user",
|
||||
name="created_by",
|
||||
model_name='user',
|
||||
name='created_by',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="user",
|
||||
name="created_time",
|
||||
model_name='user',
|
||||
name='created_time',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="user",
|
||||
name="modified_by",
|
||||
model_name='user',
|
||||
name='modified_by',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="user",
|
||||
name="modified_time",
|
||||
model_name='user',
|
||||
name='modified_time',
|
||||
),
|
||||
]
|
||||
|
||||
@@ -6,28 +6,28 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0004_auto_20201002_1257"),
|
||||
('accounts', '0004_auto_20201002_1257'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="created_by",
|
||||
model_name='user',
|
||||
name='created_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="created_time",
|
||||
model_name='user',
|
||||
name='created_time',
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="modified_by",
|
||||
model_name='user',
|
||||
name='modified_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="modified_time",
|
||||
model_name='user',
|
||||
name='modified_time',
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
# Generated by Django 3.1.2 on 2020-11-10 20:24
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("agents", "0024_auto_20201101_2319"),
|
||||
("accounts", "0005_auto_20201002_1303"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="agent",
|
||||
field=models.OneToOneField(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="user",
|
||||
to="agents.agent",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,25 +0,0 @@
|
||||
# Generated by Django 3.1.2 on 2020-11-01 22:54
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def link_agents_to_users(apps, schema_editor):
|
||||
Agent = apps.get_model("agents", "Agent")
|
||||
User = apps.get_model("accounts", "User")
|
||||
for agent in Agent.objects.all():
|
||||
user = User.objects.filter(username=agent.agent_id).first()
|
||||
|
||||
if user:
|
||||
user.agent = agent
|
||||
user.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0006_user_agent"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(link_agents_to_users, migrations.RunPython.noop),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 3.1.3 on 2020-11-12 00:39
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0007_update_agent_primary_key"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="dark_mode",
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 3.1.4 on 2020-12-10 17:00
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0008_user_dark_mode"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="show_community_scripts",
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -1,26 +0,0 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-14 01:23
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0009_user_show_community_scripts"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="agent_dblclick_action",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("editagent", "Edit Agent"),
|
||||
("takecontrol", "Take Control"),
|
||||
("remotebg", "Remote Background"),
|
||||
],
|
||||
default="editagent",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,26 +0,0 @@
|
||||
# Generated by Django 3.1.5 on 2021-01-18 09:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0010_user_agent_dblclick_action"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="default_agent_tbl_tab",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("server", "Servers"),
|
||||
("workstation", "Workstations"),
|
||||
("mixed", "Mixed"),
|
||||
],
|
||||
default="server",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 3.1.7 on 2021-02-28 06:38
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0011_user_default_agent_tbl_tab'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='agents_per_page',
|
||||
field=models.PositiveIntegerField(default=50),
|
||||
),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-09 02:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0012_user_agents_per_page'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='client_tree_sort',
|
||||
field=models.CharField(choices=[('alphafail', 'Move failing clients to the top'), ('alpha', 'Sort alphabetically')], default='alphafail', max_length=50),
|
||||
),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 3.2 on 2021-04-11 01:43
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0013_user_client_tree_sort'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='client_tree_splitter',
|
||||
field=models.PositiveIntegerField(default=11),
|
||||
),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 3.2 on 2021-04-11 03:03
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0014_user_client_tree_splitter'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='loading_bar_color',
|
||||
field=models.CharField(default='red', max_length=255),
|
||||
),
|
||||
]
|
||||
@@ -1,25 +0,0 @@
|
||||
# Generated by Django 3.2.1 on 2021-05-07 15:26
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0022_urlaction'),
|
||||
('accounts', '0015_user_loading_bar_color'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='url_action',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='user', to='core.urlaction'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='user',
|
||||
name='agent_dblclick_action',
|
||||
field=models.CharField(choices=[('editagent', 'Edit Agent'), ('takecontrol', 'Take Control'), ('remotebg', 'Remote Background'), ('urlaction', 'URL Action')], default='editagent', max_length=50),
|
||||
),
|
||||
]
|
||||
@@ -1,173 +0,0 @@
|
||||
# Generated by Django 3.2.1 on 2021-05-08 17:16
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0016_auto_20210507_1526'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_code_sign',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_do_server_maint',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_edit_agent',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_edit_core_settings',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_install_agents',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_accounts',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_alerts',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_automation_policies',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_autotasks',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_checks',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_clients',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_deployments',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_notes',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_pendingactions',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_procs',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_scripts',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_sites',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_software',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_winsvcs',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_manage_winupdates',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_reboot_agents',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_run_autotasks',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_run_bulk',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_run_checks',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_run_scripts',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_send_cmd',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_uninstall_agents',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_update_agents',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_use_mesh',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_view_auditlogs',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_view_debuglogs',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='can_view_eventlogs',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,181 +0,0 @@
|
||||
# Generated by Django 3.2.1 on 2021-05-11 02:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0017_auto_20210508_1716'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Role',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(max_length=255, unique=True)),
|
||||
('is_superuser', models.BooleanField(default=False)),
|
||||
('can_use_mesh', models.BooleanField(default=False)),
|
||||
('can_uninstall_agents', models.BooleanField(default=False)),
|
||||
('can_update_agents', models.BooleanField(default=False)),
|
||||
('can_edit_agent', models.BooleanField(default=False)),
|
||||
('can_manage_procs', models.BooleanField(default=False)),
|
||||
('can_view_eventlogs', models.BooleanField(default=False)),
|
||||
('can_send_cmd', models.BooleanField(default=False)),
|
||||
('can_reboot_agents', models.BooleanField(default=False)),
|
||||
('can_install_agents', models.BooleanField(default=False)),
|
||||
('can_run_scripts', models.BooleanField(default=False)),
|
||||
('can_run_bulk', models.BooleanField(default=False)),
|
||||
('can_manage_notes', models.BooleanField(default=False)),
|
||||
('can_edit_core_settings', models.BooleanField(default=False)),
|
||||
('can_do_server_maint', models.BooleanField(default=False)),
|
||||
('can_code_sign', models.BooleanField(default=False)),
|
||||
('can_manage_checks', models.BooleanField(default=False)),
|
||||
('can_run_checks', models.BooleanField(default=False)),
|
||||
('can_manage_clients', models.BooleanField(default=False)),
|
||||
('can_manage_sites', models.BooleanField(default=False)),
|
||||
('can_manage_deployments', models.BooleanField(default=False)),
|
||||
('can_manage_automation_policies', models.BooleanField(default=False)),
|
||||
('can_manage_autotasks', models.BooleanField(default=False)),
|
||||
('can_run_autotasks', models.BooleanField(default=False)),
|
||||
('can_view_auditlogs', models.BooleanField(default=False)),
|
||||
('can_manage_pendingactions', models.BooleanField(default=False)),
|
||||
('can_view_debuglogs', models.BooleanField(default=False)),
|
||||
('can_manage_scripts', models.BooleanField(default=False)),
|
||||
('can_manage_alerts', models.BooleanField(default=False)),
|
||||
('can_manage_winsvcs', models.BooleanField(default=False)),
|
||||
('can_manage_software', models.BooleanField(default=False)),
|
||||
('can_manage_winupdates', models.BooleanField(default=False)),
|
||||
('can_manage_accounts', models.BooleanField(default=False)),
|
||||
],
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_code_sign',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_do_server_maint',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_edit_agent',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_edit_core_settings',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_install_agents',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_accounts',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_alerts',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_automation_policies',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_autotasks',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_checks',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_clients',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_deployments',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_notes',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_pendingactions',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_procs',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_scripts',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_sites',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_software',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_winsvcs',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_manage_winupdates',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_reboot_agents',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_run_autotasks',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_run_bulk',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_run_checks',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_run_scripts',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_send_cmd',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_uninstall_agents',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_update_agents',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_use_mesh',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_view_auditlogs',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_view_debuglogs',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='can_view_eventlogs',
|
||||
),
|
||||
]
|
||||
@@ -1,25 +0,0 @@
|
||||
# Generated by Django 3.2.1 on 2021-05-11 02:33
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0018_auto_20210511_0233"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="role",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="roles",
|
||||
to="accounts.role",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 3.2.1 on 2021-05-11 17:37
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0019_user_role'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_manage_roles',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 3.2.4 on 2021-06-17 04:29
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0020_role_can_manage_roles'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_view_core_settings',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 3.2.4 on 2021-06-28 05:01
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0021_role_can_view_core_settings'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='clear_search_when_switching',
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 3.2.4 on 2021-06-30 03:22
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0022_user_clear_search_when_switching'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='is_installer_user',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-20 20:26
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0023_user_is_installer_user'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='last_login_ip',
|
||||
field=models.GenericIPAddressField(blank=True, default=None, null=True),
|
||||
),
|
||||
]
|
||||
@@ -1,33 +0,0 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 04:24
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0024_user_last_login_ip'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='created_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='created_time',
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='modified_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='modified_time',
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -1,34 +0,0 @@
|
||||
# Generated by Django 3.2.6 on 2021-09-01 12:47
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0025_auto_20210721_0424'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='APIKey',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created_by', models.CharField(blank=True, max_length=100, null=True)),
|
||||
('created_time', models.DateTimeField(auto_now_add=True, null=True)),
|
||||
('modified_by', models.CharField(blank=True, max_length=100, null=True)),
|
||||
('modified_time', models.DateTimeField(auto_now=True, null=True)),
|
||||
('name', models.CharField(max_length=25, unique=True)),
|
||||
('key', models.CharField(blank=True, max_length=48, unique=True)),
|
||||
('expiration', models.DateTimeField(blank=True, default=None, null=True)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_manage_api_keys',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,25 +0,0 @@
|
||||
# Generated by Django 3.2.6 on 2021-09-03 00:54
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0026_auto_20210901_1247'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='apikey',
|
||||
name='user',
|
||||
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='api_key', to='accounts.user'),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='block_dashboard_login',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,150 +0,0 @@
|
||||
# Generated by Django 3.2.6 on 2021-10-10 02:49
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0018_auto_20211010_0249'),
|
||||
('accounts', '0027_auto_20210903_0054'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_accounts',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_agent_history',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_agents',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_alerts',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_api_keys',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_automation_policies',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_autotasks',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_checks',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_clients',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_deployments',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_notes',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_pendingactions',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_roles',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_scripts',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_sites',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_software',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_ping_agents',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_recover_agents',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_view_clients',
|
||||
field=models.ManyToManyField(blank=True, related_name='role_clients', to='clients.Client'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_view_sites',
|
||||
field=models.ManyToManyField(blank=True, related_name='role_sites', to='clients.Site'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='apikey',
|
||||
name='created_by',
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='apikey',
|
||||
name='modified_by',
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='role',
|
||||
name='created_by',
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='role',
|
||||
name='modified_by',
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='user',
|
||||
name='created_by',
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='user',
|
||||
name='modified_by',
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='user',
|
||||
name='role',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='users', to='accounts.role'),
|
||||
),
|
||||
]
|
||||
@@ -1,28 +0,0 @@
|
||||
# Generated by Django 3.2.6 on 2021-10-22 22:45
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0028_auto_20211010_0249'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_alerttemplates',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_manage_alerttemplates',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_run_urlactions',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,23 +0,0 @@
|
||||
# Generated by Django 3.2.6 on 2021-11-04 02:21
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0029_auto_20211022_2245'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_manage_customfields',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_view_customfields',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 3.2.12 on 2022-04-02 15:57
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0030_auto_20211104_0221'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='date_format',
|
||||
field=models.CharField(blank=True, max_length=30, null=True),
|
||||
),
|
||||
]
|
||||
@@ -1,25 +0,0 @@
|
||||
# Generated by Django 4.2.1 on 2023-05-17 07:11
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("accounts", "0031_user_date_format"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="user",
|
||||
name="default_agent_tbl_tab",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("server", "Servers"),
|
||||
("workstation", "Workstations"),
|
||||
("mixed", "Mixed"),
|
||||
],
|
||||
default="mixed",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,32 +0,0 @@
|
||||
# Generated by Django 4.2.1 on 2023-05-23 04:54
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("accounts", "0032_alter_user_default_agent_tbl_tab"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="dash_info_color",
|
||||
field=models.CharField(default="info", max_length=255),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="dash_negative_color",
|
||||
field=models.CharField(default="negative", max_length=255),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="dash_positive_color",
|
||||
field=models.CharField(default="positive", max_length=255),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="dash_warning_color",
|
||||
field=models.CharField(default="warning", max_length=255),
|
||||
),
|
||||
]
|
||||
@@ -1,17 +0,0 @@
|
||||
# Generated by Django 4.1.9 on 2023-05-26 23:59
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("accounts", "0033_user_dash_info_color_user_dash_negative_color_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_send_wol",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,22 +0,0 @@
|
||||
# Generated by Django 4.2.5 on 2023-10-08 22:24
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("accounts", "0034_role_can_send_wol"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_manage_reports",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_view_reports",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,16 +0,0 @@
|
||||
# Generated by Django 4.2.7 on 2023-11-09 19:57
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("accounts", "0035_role_can_manage_reports_role_can_view_reports"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="role",
|
||||
name="can_ping_agents",
|
||||
),
|
||||
]
|
||||
@@ -1,23 +0,0 @@
|
||||
# Generated by Django 4.2.13 on 2024-06-28 20:21
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0036_remove_role_can_ping_agents"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_run_server_scripts",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_use_webterm",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,23 +0,0 @@
|
||||
# Generated by Django 4.2.16 on 2024-10-06 05:44
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0037_role_can_run_server_scripts_role_can_use_webterm"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_edit_global_keystore",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_view_global_keystore",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,82 +1,12 @@
|
||||
from typing import Optional
|
||||
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
from django.core.cache import cache
|
||||
from django.db import models
|
||||
from django.db.models.fields import CharField, DateTimeField
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
|
||||
from logs.models import BaseAuditModel
|
||||
from tacticalrmm.constants import (
|
||||
ROLE_CACHE_PREFIX,
|
||||
AgentDblClick,
|
||||
AgentTableTabs,
|
||||
ClientTreeSort,
|
||||
)
|
||||
|
||||
|
||||
class User(AbstractUser, BaseAuditModel):
|
||||
is_active = models.BooleanField(default=True)
|
||||
block_dashboard_login = models.BooleanField(default=False)
|
||||
totp_key = models.CharField(max_length=50, null=True, blank=True)
|
||||
dark_mode = models.BooleanField(default=True)
|
||||
show_community_scripts = models.BooleanField(default=True)
|
||||
agent_dblclick_action: "AgentDblClick" = models.CharField(
|
||||
max_length=50, choices=AgentDblClick.choices, default=AgentDblClick.EDIT_AGENT
|
||||
)
|
||||
url_action = models.ForeignKey(
|
||||
"core.URLAction",
|
||||
related_name="user",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
default_agent_tbl_tab = models.CharField(
|
||||
max_length=50, choices=AgentTableTabs.choices, default=AgentTableTabs.MIXED
|
||||
)
|
||||
agents_per_page = models.PositiveIntegerField(default=50) # not currently used
|
||||
client_tree_sort = models.CharField(
|
||||
max_length=50, choices=ClientTreeSort.choices, default=ClientTreeSort.ALPHA_FAIL
|
||||
)
|
||||
client_tree_splitter = models.PositiveIntegerField(default=11)
|
||||
loading_bar_color = models.CharField(max_length=255, default="red")
|
||||
dash_info_color = models.CharField(max_length=255, default="info")
|
||||
dash_positive_color = models.CharField(max_length=255, default="positive")
|
||||
dash_negative_color = models.CharField(max_length=255, default="negative")
|
||||
dash_warning_color = models.CharField(max_length=255, default="warning")
|
||||
clear_search_when_switching = models.BooleanField(default=True)
|
||||
date_format = models.CharField(max_length=30, blank=True, null=True)
|
||||
is_installer_user = models.BooleanField(default=False)
|
||||
last_login_ip = models.GenericIPAddressField(default=None, blank=True, null=True)
|
||||
|
||||
agent = models.OneToOneField(
|
||||
"agents.Agent",
|
||||
related_name="user",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
role = models.ForeignKey(
|
||||
"accounts.Role",
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="users",
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
|
||||
@property
|
||||
def mesh_user_id(self):
|
||||
return f"user//{self.mesh_username}"
|
||||
|
||||
@property
|
||||
def mesh_username(self):
|
||||
# lower() needed for mesh api
|
||||
return f"{self.username.replace(' ', '').lower()}___{self.pk}"
|
||||
|
||||
@property
|
||||
def is_sso_user(self):
|
||||
return SocialAccount.objects.filter(user_id=self.pk).exists()
|
||||
|
||||
@staticmethod
|
||||
def serialize(user):
|
||||
@@ -84,157 +14,3 @@ class User(AbstractUser, BaseAuditModel):
|
||||
from .serializers import UserSerializer
|
||||
|
||||
return UserSerializer(user).data
|
||||
|
||||
def get_and_set_role_cache(self) -> "Optional[Role]":
|
||||
role = cache.get(f"{ROLE_CACHE_PREFIX}{self.role}")
|
||||
|
||||
if role and isinstance(role, Role):
|
||||
return role
|
||||
elif not role and not self.role:
|
||||
return None
|
||||
else:
|
||||
models.prefetch_related_objects(
|
||||
[self.role],
|
||||
"can_view_clients",
|
||||
"can_view_sites",
|
||||
)
|
||||
|
||||
cache.set(f"{ROLE_CACHE_PREFIX}{self.role}", self.role, 600)
|
||||
return self.role
|
||||
|
||||
|
||||
class Role(BaseAuditModel):
|
||||
name = models.CharField(max_length=255, unique=True)
|
||||
is_superuser = models.BooleanField(default=False)
|
||||
|
||||
# agents
|
||||
can_list_agents = models.BooleanField(default=False)
|
||||
can_use_mesh = models.BooleanField(default=False)
|
||||
can_uninstall_agents = models.BooleanField(default=False)
|
||||
can_update_agents = models.BooleanField(default=False)
|
||||
can_edit_agent = models.BooleanField(default=False)
|
||||
can_manage_procs = models.BooleanField(default=False)
|
||||
can_view_eventlogs = models.BooleanField(default=False)
|
||||
can_send_cmd = models.BooleanField(default=False)
|
||||
can_reboot_agents = models.BooleanField(default=False)
|
||||
can_install_agents = models.BooleanField(default=False)
|
||||
can_run_scripts = models.BooleanField(default=False)
|
||||
can_run_bulk = models.BooleanField(default=False)
|
||||
can_recover_agents = models.BooleanField(default=False)
|
||||
can_list_agent_history = models.BooleanField(default=False)
|
||||
can_send_wol = models.BooleanField(default=False)
|
||||
|
||||
# core
|
||||
can_list_notes = models.BooleanField(default=False)
|
||||
can_manage_notes = models.BooleanField(default=False)
|
||||
can_view_core_settings = models.BooleanField(default=False)
|
||||
can_edit_core_settings = models.BooleanField(default=False)
|
||||
can_do_server_maint = models.BooleanField(default=False)
|
||||
can_code_sign = models.BooleanField(default=False)
|
||||
can_run_urlactions = models.BooleanField(default=False)
|
||||
can_view_customfields = models.BooleanField(default=False)
|
||||
can_manage_customfields = models.BooleanField(default=False)
|
||||
can_run_server_scripts = models.BooleanField(default=False)
|
||||
can_use_webterm = models.BooleanField(default=False)
|
||||
can_view_global_keystore = models.BooleanField(default=False)
|
||||
can_edit_global_keystore = models.BooleanField(default=False)
|
||||
|
||||
# checks
|
||||
can_list_checks = models.BooleanField(default=False)
|
||||
can_manage_checks = models.BooleanField(default=False)
|
||||
can_run_checks = models.BooleanField(default=False)
|
||||
|
||||
# clients
|
||||
can_list_clients = models.BooleanField(default=False)
|
||||
can_manage_clients = models.BooleanField(default=False)
|
||||
can_list_sites = models.BooleanField(default=False)
|
||||
can_manage_sites = models.BooleanField(default=False)
|
||||
can_list_deployments = models.BooleanField(default=False)
|
||||
can_manage_deployments = models.BooleanField(default=False)
|
||||
can_view_clients = models.ManyToManyField(
|
||||
"clients.Client", related_name="role_clients", blank=True
|
||||
)
|
||||
can_view_sites = models.ManyToManyField(
|
||||
"clients.Site", related_name="role_sites", blank=True
|
||||
)
|
||||
|
||||
# automation
|
||||
can_list_automation_policies = models.BooleanField(default=False)
|
||||
can_manage_automation_policies = models.BooleanField(default=False)
|
||||
|
||||
# automated tasks
|
||||
can_list_autotasks = models.BooleanField(default=False)
|
||||
can_manage_autotasks = models.BooleanField(default=False)
|
||||
can_run_autotasks = models.BooleanField(default=False)
|
||||
|
||||
# logs
|
||||
can_view_auditlogs = models.BooleanField(default=False)
|
||||
can_list_pendingactions = models.BooleanField(default=False)
|
||||
can_manage_pendingactions = models.BooleanField(default=False)
|
||||
can_view_debuglogs = models.BooleanField(default=False)
|
||||
|
||||
# scripts
|
||||
can_list_scripts = models.BooleanField(default=False)
|
||||
can_manage_scripts = models.BooleanField(default=False)
|
||||
|
||||
# alerts
|
||||
can_list_alerts = models.BooleanField(default=False)
|
||||
can_manage_alerts = models.BooleanField(default=False)
|
||||
can_list_alerttemplates = models.BooleanField(default=False)
|
||||
can_manage_alerttemplates = models.BooleanField(default=False)
|
||||
|
||||
# win services
|
||||
can_manage_winsvcs = models.BooleanField(default=False)
|
||||
|
||||
# software
|
||||
can_list_software = models.BooleanField(default=False)
|
||||
can_manage_software = models.BooleanField(default=False)
|
||||
|
||||
# windows updates
|
||||
can_manage_winupdates = models.BooleanField(default=False)
|
||||
|
||||
# accounts
|
||||
can_list_accounts = models.BooleanField(default=False)
|
||||
can_manage_accounts = models.BooleanField(default=False)
|
||||
can_list_roles = models.BooleanField(default=False)
|
||||
can_manage_roles = models.BooleanField(default=False)
|
||||
|
||||
# authentication
|
||||
can_list_api_keys = models.BooleanField(default=False)
|
||||
can_manage_api_keys = models.BooleanField(default=False)
|
||||
|
||||
# reporting
|
||||
can_view_reports = models.BooleanField(default=False)
|
||||
can_manage_reports = models.BooleanField(default=False)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def save(self, *args, **kwargs) -> None:
|
||||
# delete cache on save
|
||||
cache.delete(f"{ROLE_CACHE_PREFIX}{self.name}")
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
@staticmethod
|
||||
def serialize(role):
|
||||
# serializes the agent and returns json
|
||||
from .serializers import RoleAuditSerializer
|
||||
|
||||
return RoleAuditSerializer(role).data
|
||||
|
||||
|
||||
class APIKey(BaseAuditModel):
|
||||
name = CharField(unique=True, max_length=25)
|
||||
key = CharField(unique=True, blank=True, max_length=48)
|
||||
expiration = DateTimeField(blank=True, null=True, default=None)
|
||||
user = models.ForeignKey(
|
||||
"accounts.User",
|
||||
related_name="api_key",
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def serialize(apikey):
|
||||
from .serializers import APIKeyAuditSerializer
|
||||
|
||||
return APIKeyAuditSerializer(apikey).data
|
||||
|
||||
@@ -1,54 +0,0 @@
|
||||
from rest_framework import permissions
|
||||
|
||||
from tacticalrmm.permissions import _has_perm
|
||||
from tacticalrmm.utils import get_core_settings
|
||||
|
||||
|
||||
class AccountsPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
if r.method == "GET":
|
||||
return _has_perm(r, "can_list_accounts")
|
||||
|
||||
# allow users to reset their own password/2fa see issue #686
|
||||
base_path = "/accounts/users/"
|
||||
paths = ("reset/", "reset_totp/")
|
||||
|
||||
if r.path in [base_path + i for i in paths]:
|
||||
from accounts.models import User
|
||||
|
||||
try:
|
||||
user = User.objects.get(pk=r.data["id"])
|
||||
except User.DoesNotExist:
|
||||
pass
|
||||
else:
|
||||
if user == r.user:
|
||||
return True
|
||||
|
||||
return _has_perm(r, "can_manage_accounts")
|
||||
|
||||
|
||||
class RolesPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
if r.method == "GET":
|
||||
return _has_perm(r, "can_list_roles")
|
||||
|
||||
return _has_perm(r, "can_manage_roles")
|
||||
|
||||
|
||||
class APIKeyPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
if r.method == "GET":
|
||||
return _has_perm(r, "can_list_api_keys")
|
||||
|
||||
return _has_perm(r, "can_manage_api_keys")
|
||||
|
||||
|
||||
class LocalUserPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
settings = get_core_settings()
|
||||
return not settings.block_local_user_logon
|
||||
|
||||
|
||||
class SelfResetSSOPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
return not r.user.is_sso_user
|
||||
@@ -1,42 +1,17 @@
|
||||
import pyotp
|
||||
from django.conf import settings
|
||||
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
ReadOnlyField,
|
||||
SerializerMethodField,
|
||||
)
|
||||
|
||||
from tacticalrmm.util_settings import get_webdomain
|
||||
|
||||
from .models import APIKey, Role, User
|
||||
|
||||
|
||||
class UserUISerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = User
|
||||
fields = [
|
||||
"dark_mode",
|
||||
"show_community_scripts",
|
||||
"agent_dblclick_action",
|
||||
"url_action",
|
||||
"default_agent_tbl_tab",
|
||||
"client_tree_sort",
|
||||
"client_tree_splitter",
|
||||
"loading_bar_color",
|
||||
"dash_info_color",
|
||||
"dash_positive_color",
|
||||
"dash_negative_color",
|
||||
"dash_warning_color",
|
||||
"clear_search_when_switching",
|
||||
"block_dashboard_login",
|
||||
"date_format",
|
||||
]
|
||||
from .models import User
|
||||
|
||||
|
||||
class UserSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = User
|
||||
fields = [
|
||||
fields = (
|
||||
"id",
|
||||
"username",
|
||||
"first_name",
|
||||
@@ -44,14 +19,11 @@ class UserSerializer(ModelSerializer):
|
||||
"email",
|
||||
"is_active",
|
||||
"last_login",
|
||||
"last_login_ip",
|
||||
"role",
|
||||
"block_dashboard_login",
|
||||
"date_format",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class TOTPSetupSerializer(ModelSerializer):
|
||||
|
||||
qr_url = SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
@@ -64,42 +36,5 @@ class TOTPSetupSerializer(ModelSerializer):
|
||||
|
||||
def get_qr_url(self, obj):
|
||||
return pyotp.totp.TOTP(obj.totp_key).provisioning_uri(
|
||||
obj.username, issuer_name=get_webdomain(settings.CORS_ORIGIN_WHITELIST[0])
|
||||
obj.username, issuer_name="Tactical RMM"
|
||||
)
|
||||
|
||||
|
||||
class RoleSerializer(ModelSerializer):
|
||||
user_count = SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = Role
|
||||
fields = "__all__"
|
||||
|
||||
def get_user_count(self, obj):
|
||||
return obj.users.count()
|
||||
|
||||
|
||||
class RoleAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Role
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class APIKeySerializer(ModelSerializer):
|
||||
username = ReadOnlyField(source="user.username")
|
||||
|
||||
class Meta:
|
||||
model = APIKey
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class APIKeyAuditSerializer(ModelSerializer):
|
||||
username = ReadOnlyField(source="user.username")
|
||||
|
||||
class Meta:
|
||||
model = APIKey
|
||||
fields = [
|
||||
"name",
|
||||
"username",
|
||||
"expiration",
|
||||
]
|
||||
|
||||
@@ -1,57 +1,46 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.test import override_settings
|
||||
from model_bakery import baker, seq
|
||||
|
||||
from accounts.models import APIKey, User
|
||||
from accounts.serializers import APIKeySerializer
|
||||
from tacticalrmm.constants import AgentDblClick, AgentTableTabs, ClientTreeSort
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from accounts.models import User
|
||||
|
||||
|
||||
class TestAccounts(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
self.setup_client()
|
||||
self.client_setup()
|
||||
self.bob = User(username="bob")
|
||||
self.bob.set_password("hunter2")
|
||||
self.bob.save()
|
||||
|
||||
def test_check_creds(self):
|
||||
url = "/v2/checkcreds/"
|
||||
url = "/checkcreds/"
|
||||
|
||||
data = {"username": "bob", "password": "hunter2"}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertIn("totp", r.data.keys())
|
||||
self.assertEqual(r.data["totp"], False)
|
||||
self.assertEqual(r.data["totp"], "totp not set")
|
||||
|
||||
data = {"username": "bob", "password": "a3asdsa2314"}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.data, "Bad credentials")
|
||||
self.assertEqual(r.data, "bad credentials")
|
||||
|
||||
data = {"username": "billy", "password": "hunter2"}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.data, "Bad credentials")
|
||||
self.assertEqual(r.data, "bad credentials")
|
||||
|
||||
self.bob.totp_key = "AB5RI6YPFTZAS52G"
|
||||
self.bob.save()
|
||||
data = {"username": "bob", "password": "hunter2"}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data["totp"], True)
|
||||
|
||||
# test user set to block dashboard logins
|
||||
self.bob.block_dashboard_login = True
|
||||
self.bob.save()
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.data, "ok")
|
||||
|
||||
@patch("pyotp.TOTP.verify")
|
||||
def test_login_view(self, mock_verify):
|
||||
url = "/v2/login/"
|
||||
url = "/login/"
|
||||
|
||||
mock_verify.return_value = True
|
||||
data = {"username": "bob", "password": "hunter2", "twofactor": "123456"}
|
||||
@@ -63,7 +52,7 @@ class TestAccounts(TacticalTestCase):
|
||||
mock_verify.return_value = False
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.data, "Bad credentials")
|
||||
self.assertEqual(r.data, "bad credentials")
|
||||
|
||||
mock_verify.return_value = True
|
||||
data = {"username": "bob", "password": "asd234234asd", "twofactor": "123456"}
|
||||
@@ -71,17 +60,17 @@ class TestAccounts(TacticalTestCase):
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertIn("non_field_errors", r.data.keys())
|
||||
|
||||
# @override_settings(DEBUG=True)
|
||||
# @patch("pyotp.TOTP.verify")
|
||||
# def test_debug_login_view(self, mock_verify):
|
||||
# url = "/login/"
|
||||
# mock_verify.return_value = True
|
||||
@override_settings(DEBUG=True)
|
||||
@patch("pyotp.TOTP.verify")
|
||||
def test_debug_login_view(self, mock_verify):
|
||||
url = "/login/"
|
||||
mock_verify.return_value = True
|
||||
|
||||
# data = {"username": "bob", "password": "hunter2", "twofactor": "sekret"}
|
||||
# r = self.client.post(url, data, format="json")
|
||||
# self.assertEqual(r.status_code, 200)
|
||||
# self.assertIn("expiry", r.data.keys())
|
||||
# self.assertIn("token", r.data.keys())
|
||||
data = {"username": "bob", "password": "hunter2", "twofactor": "sekret"}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertIn("expiry", r.data.keys())
|
||||
self.assertIn("token", r.data.keys())
|
||||
|
||||
|
||||
class TestGetAddUsers(TacticalTestCase):
|
||||
@@ -166,57 +155,17 @@ class GetUpdateDeleteUser(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_put_root_user(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
data = {
|
||||
"id": self.john.pk,
|
||||
"username": "john",
|
||||
"email": "johndoe@xlawgaming.com",
|
||||
"first_name": "John",
|
||||
"last_name": "Doe",
|
||||
}
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_put_not_root_user(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
data = {
|
||||
"id": self.john.pk,
|
||||
"username": "john",
|
||||
"email": "johndoe@xlawgaming.com",
|
||||
"first_name": "John",
|
||||
"last_name": "Doe",
|
||||
}
|
||||
self.client.force_authenticate(user=self.alice)
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
def test_delete(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
url = "/accounts/893452/users/"
|
||||
url = f"/accounts/893452/users/"
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_delete_root_user(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_delete_non_root_user(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
self.client.force_authenticate(user=self.alice)
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
|
||||
class TestUserAction(TacticalTestCase):
|
||||
def setUp(self):
|
||||
@@ -235,21 +184,6 @@ class TestUserAction(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_post_root_user(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_post_non_root_user(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
|
||||
self.client.force_authenticate(user=self.alice)
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
def test_put(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk}
|
||||
@@ -261,125 +195,6 @@ class TestUserAction(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_put_root_user(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk}
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
user = User.objects.get(pk=self.john.pk)
|
||||
self.assertEqual(user.totp_key, "")
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_put_non_root_user(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk}
|
||||
self.client.force_authenticate(user=self.alice)
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
def test_user_ui(self):
|
||||
url = "/accounts/users/ui/"
|
||||
|
||||
data = {
|
||||
"dark_mode": True,
|
||||
"show_community_scripts": True,
|
||||
"agent_dblclick_action": AgentDblClick.EDIT_AGENT,
|
||||
"default_agent_tbl_tab": AgentTableTabs.MIXED,
|
||||
"client_tree_sort": ClientTreeSort.ALPHA,
|
||||
"client_tree_splitter": 14,
|
||||
"loading_bar_color": "green",
|
||||
"clear_search_when_switching": False,
|
||||
}
|
||||
r = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
|
||||
class TestUserReset(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_reset_pw(self):
|
||||
url = "/accounts/resetpw/"
|
||||
data = {"password": "superSekret123456"}
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_reset_2fa(self):
|
||||
url = "/accounts/reset2fa/"
|
||||
r = self.client.put(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
|
||||
class TestAPIKeyViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
self.authenticate()
|
||||
|
||||
def test_get_api_keys(self):
|
||||
url = "/accounts/apikeys/"
|
||||
apikeys = baker.make("accounts.APIKey", key=seq("APIKEY"), _quantity=3)
|
||||
|
||||
serializer = APIKeySerializer(apikeys, many=True)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(serializer.data, resp.data)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_api_keys(self):
|
||||
url = "/accounts/apikeys/"
|
||||
|
||||
user = baker.make("accounts.User")
|
||||
data = {"name": "Name", "user": user.id, "expiration": None}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(APIKey.objects.filter(name="Name").exists())
|
||||
self.assertTrue(APIKey.objects.get(name="Name").key)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_modify_api_key(self):
|
||||
# test a call where api key doesn't exist
|
||||
resp = self.client.put("/accounts/apikeys/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
apikey = baker.make("accounts.APIKey", name="Test")
|
||||
url = f"/accounts/apikeys/{apikey.pk}/"
|
||||
|
||||
data = {"name": "New Name"}
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
apikey = APIKey.objects.get(pk=apikey.pk)
|
||||
self.assertEqual(apikey.name, "New Name")
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_delete_api_key(self):
|
||||
# test a call where api key doesn't exist
|
||||
resp = self.client.delete("/accounts/apikeys/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test delete api key
|
||||
apikey = baker.make("accounts.APIKey")
|
||||
url = f"/accounts/apikeys/{apikey.pk}/"
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
self.assertFalse(APIKey.objects.filter(pk=apikey.pk).exists())
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
|
||||
class TestTOTPSetup(TacticalTestCase):
|
||||
def setUp(self):
|
||||
@@ -405,30 +220,4 @@ class TestTOTPSetup(TacticalTestCase):
|
||||
|
||||
r = self.client.post(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, False)
|
||||
|
||||
|
||||
class TestAPIAuthentication(TacticalTestCase):
|
||||
def setUp(self):
|
||||
# create User and associate to API Key
|
||||
self.user = User.objects.create(username="api_user", is_superuser=True)
|
||||
self.api_key = APIKey.objects.create(
|
||||
name="Test Token", key="123456", user=self.user
|
||||
)
|
||||
|
||||
self.setup_client()
|
||||
|
||||
def test_api_auth(self):
|
||||
url = "/clients/"
|
||||
# auth should fail if no header set
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
# invalid api key in header should return code 400
|
||||
self.client.credentials(HTTP_X_API_KEY="000000")
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 401)
|
||||
|
||||
# valid api key in header should return code 200
|
||||
self.client.credentials(HTTP_X_API_KEY="123456")
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "totp token already set")
|
||||
|
||||
@@ -1,22 +1,10 @@
|
||||
from django.urls import path
|
||||
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
path("users/", views.GetAddUsers.as_view()),
|
||||
path("<int:pk>/users/", views.GetUpdateDeleteUser.as_view()),
|
||||
path("sessions/<str:pk>/", views.DeleteActiveLoginSession.as_view()),
|
||||
path(
|
||||
"users/<int:pk>/sessions/", views.GetDeleteActiveLoginSessionsPerUser.as_view()
|
||||
),
|
||||
path("users/reset/", views.UserActions.as_view()),
|
||||
path("users/reset_totp/", views.UserActions.as_view()),
|
||||
path("users/setup_totp/", views.TOTPSetup.as_view()),
|
||||
path("users/ui/", views.UserUI.as_view()),
|
||||
path("roles/", views.GetAddRoles.as_view()),
|
||||
path("roles/<int:pk>/", views.GetUpdateDeleteRole.as_view()),
|
||||
path("apikeys/", views.GetAddAPIKeys.as_view()),
|
||||
path("apikeys/<int:pk>/", views.GetUpdateDeleteAPIKey.as_view()),
|
||||
path("resetpw/", views.ResetPass.as_view()),
|
||||
path("reset2fa/", views.Reset2FA.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from django.http import HttpRequest
|
||||
|
||||
from accounts.models import User
|
||||
|
||||
|
||||
def is_root_user(*, request: "HttpRequest", user: "User") -> bool:
|
||||
root = (
|
||||
hasattr(settings, "ROOT_USER")
|
||||
and request.user != user
|
||||
and user.username == settings.ROOT_USER
|
||||
)
|
||||
demo = (
|
||||
getattr(settings, "DEMO", False) and request.user.username == settings.ROOT_USER
|
||||
)
|
||||
return root or demo
|
||||
|
||||
|
||||
def is_superuser(user: "User") -> bool:
|
||||
return user.role and getattr(user.role, "is_superuser")
|
||||
@@ -1,85 +1,51 @@
|
||||
import datetime
|
||||
|
||||
import pyotp
|
||||
from allauth.socialaccount.models import SocialAccount, SocialApp
|
||||
from django.conf import settings
|
||||
|
||||
from django.contrib.auth import login
|
||||
from django.db import IntegrityError
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from knox.models import AuthToken
|
||||
from knox.views import LoginView as KnoxLoginView
|
||||
from python_ipware import IpWare
|
||||
from rest_framework.authtoken.serializers import AuthTokenSerializer
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
ReadOnlyField,
|
||||
SerializerMethodField,
|
||||
)
|
||||
from django.db import IntegrityError
|
||||
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.authtoken.serializers import AuthTokenSerializer
|
||||
from knox.views import LoginView as KnoxLoginView
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
|
||||
from accounts.utils import is_root_user
|
||||
from core.tasks import sync_mesh_perms_task
|
||||
from .models import User
|
||||
from agents.models import Agent
|
||||
from logs.models import AuditLog
|
||||
from tacticalrmm.helpers import notify_error
|
||||
from tacticalrmm.utils import get_core_settings
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import APIKey, Role, User
|
||||
from .permissions import (
|
||||
AccountsPerms,
|
||||
APIKeyPerms,
|
||||
LocalUserPerms,
|
||||
RolesPerms,
|
||||
SelfResetSSOPerms,
|
||||
)
|
||||
from .serializers import (
|
||||
APIKeySerializer,
|
||||
RoleSerializer,
|
||||
TOTPSetupSerializer,
|
||||
UserSerializer,
|
||||
UserUISerializer,
|
||||
)
|
||||
from .serializers import UserSerializer, TOTPSetupSerializer
|
||||
|
||||
|
||||
class CheckCredsV2(KnoxLoginView):
|
||||
class CheckCreds(KnoxLoginView):
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
# restrict time on tokens issued by this view to 3 min
|
||||
def get_token_ttl(self):
|
||||
return datetime.timedelta(seconds=180)
|
||||
|
||||
def post(self, request, format=None):
|
||||
|
||||
# check credentials
|
||||
serializer = AuthTokenSerializer(data=request.data)
|
||||
if not serializer.is_valid():
|
||||
AuditLog.audit_user_failed_login(
|
||||
request.data["username"], debug_info={"ip": request._client_ip}
|
||||
)
|
||||
return notify_error("Bad credentials")
|
||||
AuditLog.audit_user_failed_login(request.data["username"])
|
||||
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
user = serializer.validated_data["user"]
|
||||
|
||||
if user.block_dashboard_login or user.is_sso_user:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
# block local logon if configured
|
||||
core_settings = get_core_settings()
|
||||
if not user.is_superuser and core_settings.block_local_user_logon:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
# if totp token not set modify response to notify frontend
|
||||
if not user.totp_key:
|
||||
login(request, user)
|
||||
response = super().post(request, format=None)
|
||||
response.data["totp"] = False
|
||||
response = super(CheckCreds, self).post(request, format=None)
|
||||
response.data["totp"] = "totp not set"
|
||||
return response
|
||||
|
||||
return Response({"totp": True})
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class LoginViewV2(KnoxLoginView):
|
||||
class LoginView(KnoxLoginView):
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
def post(self, request, format=None):
|
||||
@@ -89,162 +55,34 @@ class LoginViewV2(KnoxLoginView):
|
||||
serializer.is_valid(raise_exception=True)
|
||||
user = serializer.validated_data["user"]
|
||||
|
||||
if user.block_dashboard_login:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
# block local logon if configured
|
||||
core_settings = get_core_settings()
|
||||
if not user.is_superuser and core_settings.block_local_user_logon:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
if user.is_sso_user:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
token = request.data["twofactor"]
|
||||
totp = pyotp.TOTP(user.totp_key)
|
||||
|
||||
if settings.DEBUG and token == "sekret":
|
||||
valid = True
|
||||
elif getattr(settings, "DEMO", False):
|
||||
valid = True
|
||||
elif totp.verify(token, valid_window=10):
|
||||
elif totp.verify(token, valid_window=1):
|
||||
valid = True
|
||||
|
||||
if valid:
|
||||
login(request, user)
|
||||
|
||||
# save ip information
|
||||
ipw = IpWare()
|
||||
client_ip, _ = ipw.get_client_ip(request.META)
|
||||
if client_ip:
|
||||
user.last_login_ip = str(client_ip)
|
||||
user.save()
|
||||
|
||||
AuditLog.audit_user_login_successful(
|
||||
request.data["username"], debug_info={"ip": request._client_ip}
|
||||
)
|
||||
response = super().post(request, format=None)
|
||||
response.data["username"] = request.user.username
|
||||
response.data["name"] = None
|
||||
|
||||
return Response(response.data)
|
||||
AuditLog.audit_user_login_successful(request.data["username"])
|
||||
return super(LoginView, self).post(request, format=None)
|
||||
else:
|
||||
AuditLog.audit_user_failed_twofactor(
|
||||
request.data["username"], debug_info={"ip": request._client_ip}
|
||||
)
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
|
||||
class GetDeleteActiveLoginSessionsPerUser(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
|
||||
class TokenSerializer(ModelSerializer):
|
||||
user = ReadOnlyField(source="user.username")
|
||||
|
||||
class Meta:
|
||||
model = AuthToken
|
||||
fields = (
|
||||
"digest",
|
||||
"user",
|
||||
"created",
|
||||
"expiry",
|
||||
)
|
||||
|
||||
def get(self, request, pk):
|
||||
tokens = get_object_or_404(User, pk=pk).auth_token_set.filter(
|
||||
expiry__gt=djangotime.now()
|
||||
)
|
||||
|
||||
return Response(self.TokenSerializer(tokens, many=True).data)
|
||||
|
||||
def delete(self, request, pk):
|
||||
tokens = get_object_or_404(User, pk=pk).auth_token_set.filter(
|
||||
expiry__gt=djangotime.now()
|
||||
)
|
||||
|
||||
tokens.delete()
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class DeleteActiveLoginSession(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
|
||||
def delete(self, request, pk):
|
||||
token = get_object_or_404(AuthToken, digest=pk)
|
||||
|
||||
token.delete()
|
||||
|
||||
return Response("ok")
|
||||
AuditLog.audit_user_failed_twofactor(request.data["username"])
|
||||
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
class GetAddUsers(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
|
||||
class UserSerializerSSO(ModelSerializer):
|
||||
social_accounts = SerializerMethodField()
|
||||
|
||||
def get_social_accounts(self, obj):
|
||||
accounts = SocialAccount.objects.filter(user_id=obj.pk)
|
||||
|
||||
if accounts:
|
||||
social_accounts = []
|
||||
for account in accounts:
|
||||
try:
|
||||
provider_account = account.get_provider_account()
|
||||
display = provider_account.to_str()
|
||||
except SocialApp.DoesNotExist:
|
||||
display = "Orphaned Provider"
|
||||
except Exception:
|
||||
display = "Unknown"
|
||||
|
||||
social_accounts.append(
|
||||
{
|
||||
"uid": account.uid,
|
||||
"provider": account.provider,
|
||||
"display": display,
|
||||
"last_login": account.last_login,
|
||||
"date_joined": account.date_joined,
|
||||
"extra_data": account.extra_data,
|
||||
}
|
||||
)
|
||||
|
||||
return social_accounts
|
||||
|
||||
return []
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
fields = [
|
||||
"id",
|
||||
"username",
|
||||
"first_name",
|
||||
"last_name",
|
||||
"email",
|
||||
"is_active",
|
||||
"last_login",
|
||||
"last_login_ip",
|
||||
"role",
|
||||
"block_dashboard_login",
|
||||
"date_format",
|
||||
"social_accounts",
|
||||
]
|
||||
|
||||
def get(self, request):
|
||||
search = request.GET.get("search", None)
|
||||
agents = Agent.objects.values_list("agent_id", flat=True)
|
||||
users = User.objects.exclude(username__in=agents)
|
||||
|
||||
if search:
|
||||
users = User.objects.filter(agent=None, is_installer_user=False).filter(
|
||||
username__icontains=search
|
||||
)
|
||||
else:
|
||||
users = User.objects.filter(agent=None, is_installer_user=False)
|
||||
|
||||
return Response(self.UserSerializerSSO(users, many=True).data)
|
||||
return Response(UserSerializer(users, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
# add new user
|
||||
try:
|
||||
user = User.objects.create_user( # type: ignore
|
||||
user = User.objects.create_user(
|
||||
request.data["username"],
|
||||
request.data["email"],
|
||||
request.data["password"],
|
||||
@@ -254,22 +92,15 @@ class GetAddUsers(APIView):
|
||||
f"ERROR: User {request.data['username']} already exists!"
|
||||
)
|
||||
|
||||
if "first_name" in request.data.keys():
|
||||
user.first_name = request.data["first_name"]
|
||||
if "last_name" in request.data.keys():
|
||||
user.last_name = request.data["last_name"]
|
||||
if "role" in request.data.keys() and isinstance(request.data["role"], int):
|
||||
role = get_object_or_404(Role, pk=request.data["role"])
|
||||
user.role = role
|
||||
|
||||
user.first_name = request.data["first_name"]
|
||||
user.last_name = request.data["last_name"]
|
||||
# Can be changed once permissions and groups are introduced
|
||||
user.is_superuser = True
|
||||
user.save()
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response(user.username)
|
||||
|
||||
|
||||
class GetUpdateDeleteUser(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
|
||||
def get(self, request, pk):
|
||||
user = get_object_or_404(User, pk=pk)
|
||||
|
||||
@@ -278,35 +109,24 @@ class GetUpdateDeleteUser(APIView):
|
||||
def put(self, request, pk):
|
||||
user = get_object_or_404(User, pk=pk)
|
||||
|
||||
if is_root_user(request=request, user=user):
|
||||
return notify_error("The root user cannot be modified from the UI")
|
||||
|
||||
serializer = UserSerializer(instance=user, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
sync_mesh_perms_task.delay()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def delete(self, request, pk):
|
||||
user = get_object_or_404(User, pk=pk)
|
||||
if is_root_user(request=request, user=user):
|
||||
return notify_error("The root user cannot be deleted from the UI")
|
||||
get_object_or_404(User, pk=pk).delete()
|
||||
|
||||
user.delete()
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class UserActions(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms, LocalUserPerms]
|
||||
|
||||
# reset password
|
||||
def post(self, request):
|
||||
user = get_object_or_404(User, pk=request.data["id"])
|
||||
if is_root_user(request=request, user=user):
|
||||
return notify_error("The root user cannot be modified from the UI")
|
||||
|
||||
user = get_object_or_404(User, pk=request.data["id"])
|
||||
user.set_password(request.data["password"])
|
||||
user.save()
|
||||
|
||||
@@ -314,10 +134,8 @@ class UserActions(APIView):
|
||||
|
||||
# reset two factor token
|
||||
def put(self, request):
|
||||
user = get_object_or_404(User, pk=request.data["id"])
|
||||
if is_root_user(request=request, user=user):
|
||||
return notify_error("The root user cannot be modified from the UI")
|
||||
|
||||
user = get_object_or_404(User, pk=request.data["id"])
|
||||
user.totp_key = ""
|
||||
user.save()
|
||||
|
||||
@@ -327,8 +145,10 @@ class UserActions(APIView):
|
||||
|
||||
|
||||
class TOTPSetup(APIView):
|
||||
|
||||
# totp setup
|
||||
def post(self, request):
|
||||
|
||||
user = request.user
|
||||
if not user.totp_key:
|
||||
code = pyotp.random_base32()
|
||||
@@ -336,109 +156,4 @@ class TOTPSetup(APIView):
|
||||
user.save(update_fields=["totp_key"])
|
||||
return Response(TOTPSetupSerializer(user).data)
|
||||
|
||||
return Response(False)
|
||||
|
||||
|
||||
class UserUI(APIView):
|
||||
def patch(self, request):
|
||||
serializer = UserUISerializer(
|
||||
instance=request.user, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class GetAddRoles(APIView):
|
||||
permission_classes = [IsAuthenticated, RolesPerms]
|
||||
|
||||
def get(self, request):
|
||||
roles = Role.objects.all()
|
||||
return Response(RoleSerializer(roles, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
serializer = RoleSerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response("Role was added")
|
||||
|
||||
|
||||
class GetUpdateDeleteRole(APIView):
|
||||
permission_classes = [IsAuthenticated, RolesPerms]
|
||||
|
||||
def get(self, request, pk):
|
||||
role = get_object_or_404(Role, pk=pk)
|
||||
return Response(RoleSerializer(role).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
role = get_object_or_404(Role, pk=pk)
|
||||
serializer = RoleSerializer(instance=role, data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response("Role was edited")
|
||||
|
||||
def delete(self, request, pk):
|
||||
role = get_object_or_404(Role, pk=pk)
|
||||
role.delete()
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response("Role was removed")
|
||||
|
||||
|
||||
class GetAddAPIKeys(APIView):
|
||||
permission_classes = [IsAuthenticated, APIKeyPerms]
|
||||
|
||||
def get(self, request):
|
||||
apikeys = APIKey.objects.all()
|
||||
return Response(APIKeySerializer(apikeys, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
# generate a random API Key
|
||||
from django.utils.crypto import get_random_string
|
||||
|
||||
request.data["key"] = get_random_string(length=32).upper()
|
||||
serializer = APIKeySerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response("The API Key was added")
|
||||
|
||||
|
||||
class GetUpdateDeleteAPIKey(APIView):
|
||||
permission_classes = [IsAuthenticated, APIKeyPerms]
|
||||
|
||||
def put(self, request, pk):
|
||||
apikey = get_object_or_404(APIKey, pk=pk)
|
||||
|
||||
# remove API key is present in request data
|
||||
if "key" in request.data.keys():
|
||||
request.data.pop("key")
|
||||
|
||||
serializer = APIKeySerializer(instance=apikey, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response("The API Key was edited")
|
||||
|
||||
def delete(self, request, pk):
|
||||
apikey = get_object_or_404(APIKey, pk=pk)
|
||||
apikey.delete()
|
||||
return Response("The API Key was deleted")
|
||||
|
||||
|
||||
class ResetPass(APIView):
|
||||
permission_classes = [IsAuthenticated, SelfResetSSOPerms]
|
||||
|
||||
def put(self, request):
|
||||
user = request.user
|
||||
user.set_password(request.data["password"])
|
||||
user.save()
|
||||
return Response("Password was reset.")
|
||||
|
||||
|
||||
class Reset2FA(APIView):
|
||||
permission_classes = [IsAuthenticated, SelfResetSSOPerms]
|
||||
|
||||
def put(self, request):
|
||||
user = request.user
|
||||
user.totp_key = ""
|
||||
user.save()
|
||||
return Response("2FA was reset. Log out and back in to setup.")
|
||||
return Response("totp token already set")
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import Agent, AgentCustomField, AgentHistory, Note
|
||||
from .models import Agent, AgentOutage, RecoveryAction, Note
|
||||
|
||||
admin.site.register(Agent)
|
||||
admin.site.register(AgentOutage)
|
||||
admin.site.register(RecoveryAction)
|
||||
admin.site.register(Note)
|
||||
admin.site.register(AgentCustomField)
|
||||
admin.site.register(AgentHistory)
|
||||
|
||||
@@ -1,65 +1,28 @@
|
||||
import json
|
||||
import os
|
||||
import secrets
|
||||
import string
|
||||
from .models import Agent
|
||||
from model_bakery.recipe import Recipe, seq
|
||||
from itertools import cycle
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery.recipe import Recipe, foreign_key, seq
|
||||
|
||||
from tacticalrmm.constants import AgentMonType, AgentPlat
|
||||
|
||||
|
||||
def generate_agent_id() -> str:
|
||||
return "".join(secrets.choice(string.ascii_letters) for i in range(39))
|
||||
|
||||
|
||||
site = Recipe("clients.Site")
|
||||
|
||||
|
||||
def get_wmi_data():
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json")
|
||||
) as f:
|
||||
return json.load(f)
|
||||
|
||||
|
||||
def get_win_svcs():
|
||||
svcs = settings.BASE_DIR.joinpath("tacticalrmm/test_data/winsvcs.json")
|
||||
with open(svcs) as f:
|
||||
return json.load(f)
|
||||
|
||||
|
||||
agent = Recipe(
|
||||
"agents.Agent",
|
||||
site=foreign_key(site),
|
||||
hostname="DESKTOP-TEST123",
|
||||
version="1.3.0",
|
||||
monitoring_type=cycle(AgentMonType.values),
|
||||
agent_id=seq(generate_agent_id()),
|
||||
last_seen=djangotime.now() - djangotime.timedelta(days=5),
|
||||
plat=AgentPlat.WINDOWS,
|
||||
Agent,
|
||||
client="Default",
|
||||
site="Default",
|
||||
hostname=seq("TestHostname"),
|
||||
monitoring_type=cycle(["workstation", "server"]),
|
||||
)
|
||||
|
||||
server_agent = agent.extend(
|
||||
monitoring_type=AgentMonType.SERVER,
|
||||
monitoring_type="server",
|
||||
)
|
||||
|
||||
workstation_agent = agent.extend(
|
||||
monitoring_type=AgentMonType.WORKSTATION,
|
||||
monitoring_type="workstation",
|
||||
)
|
||||
|
||||
online_agent = agent.extend(
|
||||
last_seen=djangotime.now(), services=get_win_svcs(), wmi_detail=get_wmi_data()
|
||||
)
|
||||
|
||||
offline_agent = agent.extend(
|
||||
last_seen=djangotime.now() - djangotime.timedelta(minutes=7)
|
||||
)
|
||||
online_agent = agent.extend(last_seen=djangotime.now())
|
||||
|
||||
overdue_agent = agent.extend(
|
||||
last_seen=djangotime.now() - djangotime.timedelta(minutes=35)
|
||||
last_seen=djangotime.now() - djangotime.timedelta(minutes=6)
|
||||
)
|
||||
|
||||
agent_with_services = agent.extend(
|
||||
@@ -86,5 +49,3 @@ agent_with_services = agent.extend(
|
||||
},
|
||||
],
|
||||
)
|
||||
|
||||
agent_with_wmi = agent.extend(wmi_detail=get_wmi_data())
|
||||
|
||||
@@ -1,82 +0,0 @@
|
||||
from agents.models import Agent, AgentHistory
|
||||
from channels.db import database_sync_to_async
|
||||
from channels.generic.websocket import AsyncJsonWebsocketConsumer
|
||||
from django.contrib.auth.models import AnonymousUser
|
||||
from django.shortcuts import get_object_or_404
|
||||
from tacticalrmm.constants import AGENT_DEFER, AgentHistoryType
|
||||
from tacticalrmm.permissions import _has_perm_on_agent
|
||||
|
||||
|
||||
class SendCMD(AsyncJsonWebsocketConsumer):
|
||||
async def connect(self):
|
||||
self.user = self.scope["user"]
|
||||
|
||||
if isinstance(self.user, AnonymousUser):
|
||||
await self.close()
|
||||
|
||||
await self.accept()
|
||||
|
||||
async def receive_json(self, payload, **kwargs):
|
||||
auth = await self.has_perm(payload["agent_id"])
|
||||
if not auth:
|
||||
await self.send_json(
|
||||
{"ret": "You do not have permission to perform this action."}
|
||||
)
|
||||
return
|
||||
|
||||
agent = await self.get_agent(payload["agent_id"])
|
||||
timeout = int(payload["timeout"])
|
||||
if payload["shell"] == "custom" and payload["custom_shell"]:
|
||||
shell = payload["custom_shell"]
|
||||
else:
|
||||
shell = payload["shell"]
|
||||
|
||||
hist_pk = await self.get_history_id(agent, payload["cmd"])
|
||||
|
||||
data = {
|
||||
"func": "rawcmd",
|
||||
"timeout": timeout,
|
||||
"payload": {
|
||||
"command": payload["cmd"],
|
||||
"shell": shell,
|
||||
},
|
||||
"id": hist_pk,
|
||||
}
|
||||
|
||||
ret = await agent.nats_cmd(data, timeout=timeout + 2)
|
||||
await self.send_json({"ret": ret})
|
||||
|
||||
async def disconnect(self, _):
|
||||
pass
|
||||
|
||||
def _has_perm(self, perm: str) -> bool:
|
||||
if self.user.is_superuser or (
|
||||
self.user.role and getattr(self.user.role, "is_superuser")
|
||||
):
|
||||
return True
|
||||
|
||||
# make sure non-superusers with empty roles aren't permitted
|
||||
elif not self.user.role:
|
||||
return False
|
||||
|
||||
return self.user.role and getattr(self.user.role, perm)
|
||||
|
||||
@database_sync_to_async # type: ignore
|
||||
def get_agent(self, agent_id: str) -> "Agent":
|
||||
return get_object_or_404(Agent.objects.defer(*AGENT_DEFER), agent_id=agent_id)
|
||||
|
||||
@database_sync_to_async # type: ignore
|
||||
def get_history_id(self, agent: "Agent", cmd: str) -> int:
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type=AgentHistoryType.CMD_RUN,
|
||||
command=cmd,
|
||||
username=self.user.username[:50],
|
||||
)
|
||||
return hist.pk
|
||||
|
||||
@database_sync_to_async # type: ignore
|
||||
def has_perm(self, agent_id: str) -> bool:
|
||||
return self._has_perm("can_send_cmd") and _has_perm_on_agent(
|
||||
self.user, agent_id
|
||||
)
|
||||
@@ -1,93 +0,0 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from agents.models import Agent
|
||||
from clients.models import Client, Site
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Bulk update agent offline/overdue time"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument("time", type=int, help="Time in minutes")
|
||||
parser.add_argument(
|
||||
"--client",
|
||||
type=str,
|
||||
help="Client Name",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--site",
|
||||
type=str,
|
||||
help="Site Name",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--offline",
|
||||
action="store_true",
|
||||
help="Offline",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--overdue",
|
||||
action="store_true",
|
||||
help="Overdue",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--all",
|
||||
action="store_true",
|
||||
help="All agents",
|
||||
)
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
time = kwargs["time"]
|
||||
client_name = kwargs["client"]
|
||||
site_name = kwargs["site"]
|
||||
all_agents = kwargs["all"]
|
||||
offline = kwargs["offline"]
|
||||
overdue = kwargs["overdue"]
|
||||
agents = None
|
||||
|
||||
if offline and time < 2:
|
||||
self.stdout.write(self.style.ERROR("Minimum offline time is 2 minutes"))
|
||||
return
|
||||
|
||||
if overdue and time < 3:
|
||||
self.stdout.write(self.style.ERROR("Minimum overdue time is 3 minutes"))
|
||||
return
|
||||
|
||||
if client_name:
|
||||
try:
|
||||
client = Client.objects.get(name=client_name)
|
||||
except Client.DoesNotExist:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(f"Client {client_name} doesn't exist")
|
||||
)
|
||||
return
|
||||
|
||||
agents = Agent.objects.filter(site__client=client)
|
||||
|
||||
elif site_name:
|
||||
try:
|
||||
site = Site.objects.get(name=site_name)
|
||||
except Site.DoesNotExist:
|
||||
self.stdout.write(self.style.ERROR(f"Site {site_name} doesn't exist"))
|
||||
return
|
||||
|
||||
agents = Agent.objects.filter(site=site)
|
||||
|
||||
elif all_agents:
|
||||
agents = Agent.objects.all()
|
||||
|
||||
if agents:
|
||||
if offline:
|
||||
agents.update(offline_time=time)
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f"Changed offline time on {len(agents)} agents to {time} minutes"
|
||||
)
|
||||
)
|
||||
|
||||
if overdue:
|
||||
agents.update(overdue_time=time)
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f"Changed overdue time on {len(agents)} agents to {time} minutes"
|
||||
)
|
||||
)
|
||||
@@ -1,112 +0,0 @@
|
||||
import asyncio
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone as djangotime
|
||||
from packaging import version as pyver
|
||||
|
||||
from agents.models import Agent
|
||||
from tacticalrmm.constants import AGENT_DEFER
|
||||
from tacticalrmm.utils import reload_nats
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Delete multiple agents based on criteria"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--days",
|
||||
type=int,
|
||||
help="Delete agents that have not checked in for this many days",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--agentver",
|
||||
type=str,
|
||||
help="Delete agents that equal to or less than this version",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--site",
|
||||
type=str,
|
||||
help="Delete agents that belong to the specified site",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--client",
|
||||
type=str,
|
||||
help="Delete agents that belong to the specified client",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--hostname",
|
||||
type=str,
|
||||
help="Delete agents with hostname starting with argument",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--delete",
|
||||
action="store_true",
|
||||
help="This will delete agents",
|
||||
)
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
days = kwargs["days"]
|
||||
agentver = kwargs["agentver"]
|
||||
site = kwargs["site"]
|
||||
client = kwargs["client"]
|
||||
hostname = kwargs["hostname"]
|
||||
delete = kwargs["delete"]
|
||||
|
||||
if not days and not agentver and not site and not client and not hostname:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
"Must have at least one parameter: days, agentver, site, client or hostname"
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
agents = Agent.objects.select_related("site__client").defer(*AGENT_DEFER)
|
||||
|
||||
if days:
|
||||
overdue = djangotime.now() - djangotime.timedelta(days=days)
|
||||
agents = agents.filter(last_seen__lt=overdue)
|
||||
|
||||
if site:
|
||||
agents = agents.filter(site__name=site)
|
||||
|
||||
if client:
|
||||
agents = agents.filter(site__client__name=client)
|
||||
|
||||
if hostname:
|
||||
agents = agents.filter(hostname__istartswith=hostname)
|
||||
|
||||
if agentver:
|
||||
agents = [
|
||||
i for i in agents if pyver.parse(i.version) <= pyver.parse(agentver)
|
||||
]
|
||||
|
||||
if len(agents) == 0:
|
||||
self.stdout.write(self.style.ERROR("No agents matched"))
|
||||
return
|
||||
|
||||
deleted_count = 0
|
||||
for agent in agents:
|
||||
s = f"{agent.hostname} | Version {agent.version} | Last Seen {agent.last_seen} | {agent.client} > {agent.site}"
|
||||
if delete:
|
||||
s = "Deleting " + s
|
||||
self.stdout.write(self.style.SUCCESS(s))
|
||||
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
|
||||
try:
|
||||
agent.delete()
|
||||
except Exception as e:
|
||||
err = f"Failed to delete agent {agent.hostname}: {e}"
|
||||
self.stdout.write(self.style.ERROR(err))
|
||||
else:
|
||||
deleted_count += 1
|
||||
else:
|
||||
self.stdout.write(self.style.WARNING(s))
|
||||
|
||||
if delete:
|
||||
reload_nats()
|
||||
self.stdout.write(self.style.SUCCESS(f"Deleted {deleted_count} agents"))
|
||||
else:
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
"The above agents would be deleted. Run again with --delete to actually delete them."
|
||||
)
|
||||
)
|
||||
@@ -1,31 +0,0 @@
|
||||
# import datetime as dt
|
||||
import random
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from agents.models import Agent
|
||||
from core.tasks import cache_db_fields_task
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "stuff for demo site in cron"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
random_dates = []
|
||||
now = djangotime.now()
|
||||
|
||||
for _ in range(20):
|
||||
rand = now - djangotime.timedelta(minutes=random.randint(1, 2))
|
||||
random_dates.append(rand)
|
||||
|
||||
for _ in range(5):
|
||||
rand = now - djangotime.timedelta(minutes=random.randint(10, 20))
|
||||
random_dates.append(rand)
|
||||
|
||||
agents = Agent.objects.only("last_seen")
|
||||
for agent in agents:
|
||||
agent.last_seen = random.choice(random_dates)
|
||||
agent.save(update_fields=["last_seen"])
|
||||
|
||||
cache_db_fields_task()
|
||||
@@ -1,846 +0,0 @@
|
||||
import datetime as dt
|
||||
import json
|
||||
import random
|
||||
import string
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management import call_command
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from accounts.models import User
|
||||
from agents.models import Agent, AgentHistory
|
||||
from automation.models import Policy
|
||||
from autotasks.models import AutomatedTask, TaskResult
|
||||
from checks.models import Check, CheckHistory, CheckResult
|
||||
from clients.models import Client, Site
|
||||
from logs.models import AuditLog, PendingAction
|
||||
from scripts.models import Script
|
||||
from software.models import InstalledSoftware
|
||||
from tacticalrmm.constants import (
|
||||
AgentHistoryType,
|
||||
AgentMonType,
|
||||
AgentPlat,
|
||||
AlertSeverity,
|
||||
CheckStatus,
|
||||
CheckType,
|
||||
EvtLogFailWhen,
|
||||
EvtLogNames,
|
||||
EvtLogTypes,
|
||||
GoArch,
|
||||
PAAction,
|
||||
ScriptShell,
|
||||
TaskSyncStatus,
|
||||
TaskType,
|
||||
)
|
||||
from tacticalrmm.demo_data import (
|
||||
check_network_loc_aware_ps1,
|
||||
check_storage_pool_health_ps1,
|
||||
clear_print_spool_bat,
|
||||
disks,
|
||||
disks_linux_deb,
|
||||
disks_linux_pi,
|
||||
ping_fail_output,
|
||||
ping_success_output,
|
||||
restart_nla_ps1,
|
||||
show_temp_dir_py,
|
||||
spooler_stdout,
|
||||
temp_dir_stdout,
|
||||
wmi_deb,
|
||||
wmi_pi,
|
||||
wmi_mac,
|
||||
disks_mac,
|
||||
)
|
||||
from winupdate.models import WinUpdate, WinUpdatePolicy
|
||||
|
||||
AGENTS_TO_GENERATE = 250
|
||||
|
||||
SVCS = settings.BASE_DIR.joinpath("tacticalrmm/test_data/winsvcs.json")
|
||||
WMI_1 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/wmi1.json")
|
||||
WMI_2 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/wmi2.json")
|
||||
WMI_3 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/wmi3.json")
|
||||
SW_1 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/software1.json")
|
||||
SW_2 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/software2.json")
|
||||
WIN_UPDATES = settings.BASE_DIR.joinpath("tacticalrmm/test_data/winupdates.json")
|
||||
EVT_LOG_FAIL = settings.BASE_DIR.joinpath(
|
||||
"tacticalrmm/test_data/eventlog_check_fail.json"
|
||||
)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Populate the database with fake demo data.

    WARNING: destructive. Deletes every existing agent, client, check,
    script, task, policy, audit log and pending action, then regenerates
    ``AGENTS_TO_GENERATE`` fake agents with plausible checks, tasks,
    histories and results for demo/dev environments.
    """

    help = "populate database with fake agents"

    def rand_string(self, length: int) -> str:
        """Return a random string of ASCII letters of the given length."""
        return "".join(random.choice(string.ascii_letters) for _ in range(length))

    def _gen_check_history(self, check, agent, django_now, y_for, results_for=None):
        """Create 30 CheckHistory points for *check*, one every 2 minutes
        going back from *django_now*.

        ``y_for(i)`` supplies the y value for point ``i``;
        ``results_for(i)``, when given, supplies the results payload.
        """
        for i in range(30):
            entry = CheckHistory()
            entry.check_id = check.pk
            entry.agent_id = agent.agent_id
            entry.x = django_now - djangotime.timedelta(minutes=i * 2)
            entry.y = y_for(i)
            if results_for is not None:
                entry.results = results_for(i)
            entry.save()

    def handle(self, *args, **kwargs) -> None:
        """Wipe demo-relevant tables and regenerate all fake data."""
        user = User.objects.first()
        if user:
            user.totp_key = "ABSA234234"
            user.save(update_fields=["totp_key"])

        # wipe everything this command regenerates
        Agent.objects.all().delete()
        Client.objects.all().delete()
        Check.objects.all().delete()
        Script.objects.all().delete()
        AutomatedTask.objects.all().delete()
        CheckHistory.objects.all().delete()
        Policy.objects.all().delete()
        AuditLog.objects.all().delete()
        PendingAction.objects.all().delete()

        call_command("load_community_scripts")
        call_command("initial_db_setup")
        call_command("load_chocos")
        call_command("create_installer_user")

        # policies
        check_policy = Policy()
        check_policy.name = "Demo Checks Policy"
        check_policy.desc = "Demo Checks Policy"
        check_policy.active = True
        check_policy.enforced = True
        check_policy.save()

        patch_policy = Policy()
        patch_policy.name = "Demo Patch Policy"
        patch_policy.desc = "Demo Patch Policy"
        patch_policy.active = True
        patch_policy.enforced = True
        patch_policy.save()

        update_policy = WinUpdatePolicy()
        update_policy.policy = patch_policy
        update_policy.critical = "approve"
        update_policy.important = "approve"
        update_policy.moderate = "approve"
        update_policy.low = "ignore"
        update_policy.other = "ignore"
        update_policy.run_time_days = [6, 0, 2]
        update_policy.run_time_day = 1
        update_policy.reboot_after_install = "required"
        update_policy.reprocess_failed = True
        update_policy.email_if_fail = True
        update_policy.save()

        # clients and their sites, data-driven instead of six copied stanzas
        clients = (
            "Company 1",
            "Company 2",
            "Company 3",
            "Company 4",
            "Company 5",
            "Company 6",
        )
        sites_by_client = {
            name: (f"HQ{i}", f"LA Office {i}", f"NY Office {i}")
            for i, name in enumerate(clients, start=1)
        }
        for name, site_names in sites_by_client.items():
            client_obj = Client(name=name)
            client_obj.save()
            for site_name in site_names:
                Site(client=client_obj, name=site_name).save()

        # "Company 5" sites are used below to demo failing checks
        sites5 = sites_by_client[clients[4]]

        hostnames = (
            "DC-1",
            "DC-2",
            "FSV-1",
            "FSV-2",
            "WSUS",
            "DESKTOP-12345",
            "LAPTOP-55443",
            "db-aws-01",
            "Karens-MacBook-Air.local",
        )
        descriptions = ("Bob's computer", "Primary DC", "File Server", "Karen's Laptop")
        modes = AgentMonType.values
        op_systems_servers = (
            "Microsoft Windows Server 2016 Standard, 64bit (build 14393)",
            "Microsoft Windows Server 2012 R2 Standard, 64bit (build 9600)",
            "Microsoft Windows Server 2019 Standard, 64bit (build 17763)",
        )
        op_systems_workstations = (
            "Microsoft Windows 8.1 Pro, 64bit (build 9600)",
            "Microsoft Windows 10 Pro for Workstations, 64bit (build 18363)",
            "Microsoft Windows 10 Pro, 64bit (build 18363)",
        )

        linux_deb_os = "Debian 11.2 x86_64 5.10.0-11-amd64"
        linux_pi_os = "Raspbian 11.2 armv7l 5.10.92-v7+"
        mac_os = "Darwin 12.5.1 arm64 21.6.0"

        public_ips = ("65.234.22.4", "74.123.43.5", "44.21.134.45")
        total_rams = (4, 8, 16, 32, 64, 128)

        now = dt.datetime.now()
        django_now = djangotime.now()

        # plausible uptimes: mostly a few hours, some multi-day
        boot_times = [
            str((now - dt.timedelta(hours=random.randint(1, 22))).timestamp())
            for _ in range(15)
        ]
        boot_times += [
            str((now - dt.timedelta(days=random.randint(2, 50))).timestamp())
            for _ in range(5)
        ]

        user_names = ("None", "Karen", "Steve", "jsmith", "jdoe")

        with open(SVCS) as f:
            services = json.load(f)

        # WMI samples
        with open(WMI_1) as f:
            wmi1 = json.load(f)
        with open(WMI_2) as f:
            wmi2 = json.load(f)
        with open(WMI_3) as f:
            wmi3 = json.load(f)
        wmi_details = [wmi1, wmi2, wmi3]

        # software samples
        with open(SW_1) as f:
            software1 = json.load(f)
        with open(SW_2) as f:
            software2 = json.load(f)
        softwares = [software1, software2]

        # windows updates sample
        with open(WIN_UPDATES) as f:
            windows_updates = json.load(f)["samplecomputer"]

        # event log check fail data
        with open(EVT_LOG_FAIL) as f:
            eventlog_check_fail_data = json.load(f)

        # create scripts
        clear_spool = Script()
        clear_spool.name = "Clear Print Spooler"
        clear_spool.description = "clears the print spooler"
        clear_spool.filename = "clear_print_spool.bat"
        clear_spool.shell = ScriptShell.CMD
        clear_spool.script_body = clear_print_spool_bat
        clear_spool.save()

        check_net_aware = Script()
        check_net_aware.name = "Check Network Location Awareness"
        check_net_aware.description = "Check's network location awareness on domain computers, should always be domain profile and not public or private. Sometimes happens when computer restarts before domain available. This script will return 0 if check passes or 1 if it fails."
        check_net_aware.filename = "check_network_loc_aware.ps1"
        check_net_aware.shell = ScriptShell.POWERSHELL
        check_net_aware.script_body = check_network_loc_aware_ps1
        check_net_aware.save()

        check_pool_health = Script()
        check_pool_health.name = "Check storage spool health"
        check_pool_health.description = "loops through all storage pools and will fail if any of them are not healthy"
        check_pool_health.filename = "check_storage_pool_health.ps1"
        check_pool_health.shell = ScriptShell.POWERSHELL
        check_pool_health.script_body = check_storage_pool_health_ps1
        check_pool_health.save()

        restart_nla = Script()
        restart_nla.name = "Restart NLA Service"
        restart_nla.description = "restarts the Network Location Awareness windows service to fix the nic profile. Run this after the check network service fails"
        restart_nla.filename = "restart_nla.ps1"
        restart_nla.shell = ScriptShell.POWERSHELL
        restart_nla.script_body = restart_nla_ps1
        restart_nla.save()

        show_tmp_dir_script = Script()
        show_tmp_dir_script.name = "Check temp dir"
        show_tmp_dir_script.description = "shows files in temp dir using python"
        show_tmp_dir_script.filename = "show_temp_dir.py"
        show_tmp_dir_script.shell = ScriptShell.PYTHON
        show_tmp_dir_script.script_body = show_temp_dir_py
        show_tmp_dir_script.save()

        for count_agents in range(AGENTS_TO_GENERATE):
            client = random.choice(clients)
            site = random.choice(sites_by_client[client])

            agent = Agent()

            # ~2/15 linux, ~2/15 mac, the rest windows
            plat_pick = random.randint(1, 15)
            if plat_pick in (7, 11):
                agent.plat = AgentPlat.LINUX
                mode = AgentMonType.SERVER
                if plat_pick == 7:
                    # pi arm
                    agent.goarch = GoArch.ARM32
                    agent.wmi_detail = wmi_pi
                    agent.disks = disks_linux_pi
                    agent.operating_system = linux_pi_os
                else:
                    agent.goarch = GoArch.AMD64
                    agent.wmi_detail = wmi_deb
                    agent.disks = disks_linux_deb
                    agent.operating_system = linux_deb_os
            elif plat_pick in (4, 14):
                agent.plat = AgentPlat.DARWIN
                mode = random.choice([AgentMonType.SERVER, AgentMonType.WORKSTATION])
                agent.goarch = GoArch.ARM64
                agent.wmi_detail = wmi_mac
                agent.disks = disks_mac
                agent.operating_system = mac_os
            else:
                agent.plat = AgentPlat.WINDOWS
                agent.goarch = GoArch.AMD64
                mode = random.choice(modes)
                agent.wmi_detail = random.choice(wmi_details)
                agent.services = services
                agent.disks = random.choice(disks)
                if mode == AgentMonType.SERVER:
                    agent.operating_system = random.choice(op_systems_servers)
                else:
                    agent.operating_system = random.choice(op_systems_workstations)

            agent.version = settings.LATEST_AGENT_VER
            agent.hostname = random.choice(hostnames)
            agent.site = Site.objects.get(name=site)
            agent.agent_id = self.rand_string(40)
            agent.description = random.choice(descriptions)
            agent.monitoring_type = mode
            agent.public_ip = random.choice(public_ips)
            agent.last_seen = django_now
            agent.total_ram = random.choice(total_rams)
            agent.boot_time = random.choice(boot_times)
            agent.logged_in_username = random.choice(user_names)
            agent.mesh_node_id = (
                "3UiLhe420@kaVQ0rswzBeonW$WY0xrFFUDBQlcYdXoriLXzvPmBpMrV99vRHXFlb"
            )
            agent.overdue_email_alert = random.choice([True, False])
            agent.overdue_text_alert = random.choice([True, False])
            agent.needs_reboot = random.choice([True, False])
            agent.save()

            if agent.plat == AgentPlat.WINDOWS:
                InstalledSoftware(agent=agent, software=random.choice(softwares)).save()

            if mode == AgentMonType.WORKSTATION:
                WinUpdatePolicy(agent=agent, run_time_days=[5, 6]).save()
            else:
                WinUpdatePolicy(agent=agent).save()

            if agent.plat == AgentPlat.WINDOWS:
                # windows updates load
                for guid, update in windows_updates.items():
                    WinUpdate(
                        agent=agent,
                        guid=guid,
                        kb=update["KBs"][0],
                        title=update["Title"],
                        installed=update["Installed"],
                        downloaded=update["Downloaded"],
                        description=update["Description"],
                        severity=update["Severity"],
                    ).save()

            # agent histories
            hist = AgentHistory()
            hist.agent = agent
            hist.type = AgentHistoryType.CMD_RUN
            hist.command = "ping google.com"
            hist.username = "demo"
            hist.results = ping_success_output
            hist.save()

            hist1 = AgentHistory()
            hist1.agent = agent
            hist1.type = AgentHistoryType.SCRIPT_RUN
            hist1.script = clear_spool
            hist1.script_results = {
                "id": 1,
                "stderr": "",
                "stdout": spooler_stdout,
                "execution_time": 3.5554593,
                "retcode": 0,
            }
            hist1.save()

            if agent.plat == AgentPlat.WINDOWS:
                # disk space check
                check1 = Check()
                check1.agent = agent
                check1.check_type = CheckType.DISK_SPACE
                check1.warning_threshold = 25
                check1.error_threshold = 10
                check1.disk = "C:"
                check1.email_alert = random.choice([True, False])
                check1.text_alert = random.choice([True, False])
                check1.save()

                check_result1 = CheckResult()
                check_result1.agent = agent
                check_result1.assigned_check = check1
                check_result1.status = CheckStatus.PASSING
                check_result1.last_run = django_now
                check_result1.more_info = "Total: 498.7GB, Free: 287.4GB"
                check_result1.save()

                self._gen_check_history(
                    check1, agent, django_now, lambda i: random.randint(13, 40)
                )

            # ping check
            check2 = Check()
            check_result2 = CheckResult()

            check2.agent = agent
            check2.check_type = CheckType.PING
            check2.email_alert = random.choice([True, False])
            check2.text_alert = random.choice([True, False])

            check_result2.agent = agent
            check_result2.assigned_check = check2
            check_result2.last_run = django_now

            # "Company 5" sites demo a failing ping check
            ping_fails = site in sites5
            if ping_fails:
                check2.name = "Synology NAS"
                check2.alert_severity = AlertSeverity.ERROR
                check_result2.status = CheckStatus.FAILING
                check2.ip = "172.17.14.26"
                check_result2.more_info = ping_fail_output
            else:
                check2.name = "Google"
                check_result2.status = CheckStatus.PASSING
                check2.ip = "8.8.8.8"
                check_result2.more_info = ping_success_output

            check2.save()
            check_result2.save()

            self._gen_check_history(
                check2,
                agent,
                django_now,
                lambda i: 1 if ping_fails else 0,
                lambda i: ping_fail_output if ping_fails else ping_success_output,
            )

            # cpu load check
            check3 = Check()
            check3.agent = agent
            check3.check_type = CheckType.CPU_LOAD
            check3.warning_threshold = 70
            check3.error_threshold = 90
            check3.email_alert = random.choice([True, False])
            check3.text_alert = random.choice([True, False])
            check3.save()

            check_result3 = CheckResult()
            check_result3.agent = agent
            check_result3.assigned_check = check3
            check_result3.status = CheckStatus.PASSING
            check_result3.last_run = django_now
            check_result3.history = [
                15,
                23,
                16,
                22,
                22,
                27,
                15,
                23,
                23,
                20,
                10,
                10,
                13,
                34,
            ]
            check_result3.save()

            self._gen_check_history(
                check3, agent, django_now, lambda i: random.randint(2, 79)
            )

            # memory check
            check4 = Check()
            check4.agent = agent
            check4.check_type = CheckType.MEMORY
            check4.warning_threshold = 70
            check4.error_threshold = 85
            check4.email_alert = random.choice([True, False])
            check4.text_alert = random.choice([True, False])
            check4.save()

            check_result4 = CheckResult()
            check_result4.agent = agent
            check_result4.assigned_check = check4
            check_result4.status = CheckStatus.PASSING
            check_result4.last_run = django_now
            check_result4.history = [34, 34, 35, 36, 34, 34, 34, 34, 34, 34]
            check_result4.save()

            self._gen_check_history(
                check4, agent, django_now, lambda i: random.randint(2, 79)
            )

            # shared payload for the two script-check histories below
            script_results_ok = {
                "retcode": 0,
                "stdout": None,
                "stderr": None,
                "execution_time": "4.0000",
            }

            # script check: storage pool health
            check5 = Check()
            check5.agent = agent
            check5.check_type = CheckType.SCRIPT
            check5.email_alert = random.choice([True, False])
            check5.text_alert = random.choice([True, False])
            check5.timeout = 120
            check5.script = check_pool_health
            check5.save()

            check_result5 = CheckResult()
            check_result5.agent = agent
            check_result5.assigned_check = check5
            check_result5.status = CheckStatus.PASSING
            check_result5.last_run = django_now
            check_result5.retcode = 0
            check_result5.execution_time = "4.0000"
            check_result5.save()

            self._gen_check_history(
                check5,
                agent,
                django_now,
                lambda i: 1 if i in (10, 18) else 0,  # two historical failures
                lambda i: script_results_ok,
            )

            # script check: network location awareness
            check6 = Check()
            check6.agent = agent
            check6.check_type = CheckType.SCRIPT
            check6.email_alert = random.choice([True, False])
            check6.text_alert = random.choice([True, False])
            check6.timeout = 120
            check6.script = check_net_aware
            check6.save()

            check_result6 = CheckResult()
            check_result6.agent = agent
            check_result6.assigned_check = check6
            check_result6.status = CheckStatus.PASSING
            check_result6.last_run = django_now
            check_result6.retcode = 0
            check_result6.execution_time = "4.0000"
            check_result6.save()

            self._gen_check_history(
                check6, agent, django_now, lambda i: 0, lambda i: script_results_ok
            )

            # task that fires when the NLA check fails
            nla_task = AutomatedTask()
            nla_task.agent = agent
            nla_task.actions = [
                {
                    "name": restart_nla.name,
                    "type": "script",
                    "script": restart_nla.pk,
                    "timeout": 90,
                    "script_args": [],
                }
            ]
            nla_task.assigned_check = check6
            nla_task.name = "Restart NLA"
            nla_task.task_type = TaskType.CHECK_FAILURE
            nla_task.save()

            nla_task_result = TaskResult()
            nla_task_result.task = nla_task
            nla_task_result.agent = agent
            nla_task_result.execution_time = "1.8443"
            nla_task_result.last_run = django_now
            nla_task_result.stdout = "no stdout"
            nla_task_result.retcode = 0
            nla_task_result.sync_status = TaskSyncStatus.SYNCED
            nla_task_result.save()

            # daily scheduled print-spooler task
            spool_task = AutomatedTask()
            spool_task.agent = agent
            spool_task.actions = [
                {
                    "name": clear_spool.name,
                    "type": "script",
                    "script": clear_spool.pk,
                    "timeout": 90,
                    "script_args": [],
                }
            ]
            spool_task.name = "Clear the print spooler"
            spool_task.task_type = TaskType.DAILY
            spool_task.run_time_date = django_now + djangotime.timedelta(minutes=10)
            spool_task.expire_date = django_now + djangotime.timedelta(days=753)
            spool_task.daily_interval = 1
            spool_task.weekly_interval = 1
            spool_task.task_repetition_duration = "2h"
            spool_task.task_repetition_interval = "25m"
            spool_task.random_task_delay = "3m"
            spool_task.save()

            spool_task_result = TaskResult()
            spool_task_result.task = spool_task
            spool_task_result.agent = agent
            spool_task_result.last_run = django_now
            spool_task_result.retcode = 0
            spool_task_result.stdout = spooler_stdout
            spool_task_result.sync_status = TaskSyncStatus.SYNCED
            spool_task_result.save()

            # manual task
            tmp_dir_task = AutomatedTask()
            tmp_dir_task.agent = agent
            tmp_dir_task.name = "show temp dir files"
            tmp_dir_task.actions = [
                {
                    "name": show_tmp_dir_script.name,
                    "type": "script",
                    "script": show_tmp_dir_script.pk,
                    "timeout": 90,
                    "script_args": [],
                }
            ]
            tmp_dir_task.task_type = TaskType.MANUAL
            tmp_dir_task.save()

            tmp_dir_task_result = TaskResult()
            tmp_dir_task_result.task = tmp_dir_task
            tmp_dir_task_result.agent = agent
            tmp_dir_task_result.last_run = django_now
            tmp_dir_task_result.stdout = temp_dir_stdout
            tmp_dir_task_result.retcode = 0
            tmp_dir_task_result.sync_status = TaskSyncStatus.SYNCED
            tmp_dir_task_result.save()

            # script check: clear print spooler
            check7 = Check()
            check7.agent = agent
            check7.check_type = CheckType.SCRIPT
            check7.email_alert = random.choice([True, False])
            check7.text_alert = random.choice([True, False])
            check7.timeout = 120
            check7.script = clear_spool
            check7.save()

            check_result7 = CheckResult()
            check_result7.assigned_check = check7
            check_result7.agent = agent
            check_result7.status = CheckStatus.PASSING
            check_result7.last_run = django_now
            check_result7.retcode = 0
            check_result7.execution_time = "3.1337"
            check_result7.stdout = spooler_stdout
            check_result7.save()

            spool_results = {
                "retcode": 0,
                "stdout": spooler_stdout,
                "stderr": None,
                "execution_time": "3.1337",
            }
            self._gen_check_history(
                check7, agent, django_now, lambda i: 0, lambda i: spool_results
            )

            if agent.plat == AgentPlat.WINDOWS:
                # windows service check
                check8 = Check()
                check8.agent = agent
                check8.check_type = CheckType.WINSVC
                check8.email_alert = random.choice([True, False])
                check8.text_alert = random.choice([True, False])
                check8.fails_b4_alert = 4
                check8.svc_name = "Spooler"
                check8.svc_display_name = "Print Spooler"
                check8.pass_if_start_pending = False
                check8.restart_if_stopped = True
                check8.save()

                check_result8 = CheckResult()
                check_result8.assigned_check = check8
                check_result8.agent = agent
                check_result8.status = CheckStatus.PASSING
                check_result8.last_run = django_now
                check_result8.more_info = "Status RUNNING"
                check_result8.save()

                self._gen_check_history(
                    check8,
                    agent,
                    django_now,
                    lambda i: 1 if i in (10, 18) else 0,
                    lambda i: "Status STOPPED" if i in (10, 18) else "Status RUNNING",
                )

                # event log check
                check9 = Check()
                check9.agent = agent
                check9.check_type = CheckType.EVENT_LOG
                check9.name = "unexpected shutdown"
                check9.email_alert = random.choice([True, False])
                check9.text_alert = random.choice([True, False])
                check9.fails_b4_alert = 2
                check9.log_name = EvtLogNames.APPLICATION
                check9.event_id = 1001
                check9.event_type = EvtLogTypes.INFO
                check9.fail_when = EvtLogFailWhen.CONTAINS
                check9.search_last_days = 30

                check_result9 = CheckResult()
                check_result9.agent = agent
                check_result9.assigned_check = check9
                check_result9.last_run = django_now
                if site in sites5:
                    check_result9.extra_details = eventlog_check_fail_data
                    check_result9.status = CheckStatus.FAILING
                else:
                    check_result9.extra_details = {"log": []}
                    check_result9.status = CheckStatus.PASSING

                check9.save()
                check_result9.save()

                self._gen_check_history(
                    check9,
                    agent,
                    django_now,
                    lambda i: 1 if i in (10, 18) else 0,
                    lambda i: "Events Found: 16" if i in (10, 18) else "Events Found: 0",
                )

            # ~20% of agents get a pending scheduled reboot
            pick = random.randint(1, 10)
            if pick in (3, 5):
                reboot_time = django_now + djangotime.timedelta(
                    minutes=random.randint(1000, 500000)
                )
                # round to the minute, matching what the UI schedules
                obj = dt.datetime.strptime(
                    dt.datetime.strftime(reboot_time, "%Y-%m-%d %H:%M"),
                    "%Y-%m-%d %H:%M",
                )
                task_name = "TacticalRMM_SchedReboot_" + self.rand_string(10)

                sched_reboot = PendingAction()
                sched_reboot.agent = agent
                sched_reboot.action_type = PAAction.SCHED_REBOOT
                sched_reboot.details = {
                    "time": str(obj),
                    "taskname": task_name,
                }
                sched_reboot.save()

            self.stdout.write(self.style.SUCCESS(f"Added agent # {count_agents + 1}"))

        self.stdout.write("done")
|
||||
@@ -1,30 +0,0 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from agents.models import Agent
|
||||
from tacticalrmm.constants import AGENT_DEFER
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Search every agent's cached Windows service list for a substring and
    print the matching services."""

    help = "Find all agents that have a certain service installed"

    def add_arguments(self, parser):
        # service name (or display name) substring to search for
        parser.add_argument("name", type=str)

    def handle(self, *args, **kwargs):
        search = kwargs["name"].lower()

        agents = Agent.objects.defer(*AGENT_DEFER)
        for agent in agents:
            # agent.services may be None (agent never reported in) or contain
            # malformed entries; skip those agents instead of crashing.
            # Was a bare `except:` which also swallowed KeyboardInterrupt etc.
            try:
                for svc in agent.services:
                    if (
                        search in svc["name"].lower()
                        or search in svc["display_name"].lower()
                    ):
                        self.stdout.write(
                            self.style.SUCCESS(
                                f"{agent.hostname} - {svc['name']} ({svc['display_name']}) - {svc['status']}"
                            )
                        )
            except (TypeError, KeyError, AttributeError):
                continue
|
||||
@@ -1,24 +0,0 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from agents.models import Agent
|
||||
from tacticalrmm.constants import AGENT_DEFER
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Remove duplicate custom-field values from agents, keeping only the
    most recent (highest pk) value for each field name."""

    def find_duplicates(self, lst):
        """Return the distinct items that appear more than once in *lst*."""
        from collections import Counter

        # single O(n) pass; the previous list.count() per item was O(n^2)
        return [item for item, count in Counter(lst).items() if count > 1]

    def handle(self, *args, **kwargs):
        for agent in Agent.objects.defer(*AGENT_DEFER).prefetch_related(
            "custom_fields__field"
        ):
            if dupes := self.find_duplicates(
                [i.field.name for i in agent.custom_fields.all()]
            ):
                for dupe in dupes:
                    cf = list(
                        agent.custom_fields.filter(field__name=dupe).order_by("id")
                    )
                    # delete everything except the newest value
                    for stale in cf[:-1]:
                        stale.delete()
|
||||
16
api/tacticalrmm/agents/management/commands/fix_salt_key.py
Normal file
16
api/tacticalrmm/agents/management/commands/fix_salt_key.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from agents.models import Agent
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """One-shot migration helper: populate the ``salt_id`` model field on
    agents that do not have one yet, using ``hostname-pk``."""

    help = "Changes existing agents salt_id from a property to a model field"

    def handle(self, *args, **kwargs):
        for agent in Agent.objects.filter(salt_id=None):
            self.stdout.write(
                self.style.SUCCESS(f"Setting salt_id on {agent.hostname}")
            )
            agent.salt_id = f"{agent.hostname}-{agent.pk}"
            agent.save(update_fields=["salt_id"])
|
||||
@@ -1,18 +0,0 @@
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from agents.models import Agent
|
||||
from tacticalrmm.constants import AGENT_STATUS_ONLINE, ONLINE_AGENTS
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Print every currently-online agent that is not running the latest
    agent version."""

    help = "Shows online agents that are not on the latest version"

    def handle(self, *args, **kwargs):
        fields = ONLINE_AGENTS + ("hostname",)
        candidates = Agent.objects.exclude(version=settings.LATEST_AGENT_VER).only(
            *fields
        )
        for agent in candidates:
            # status is computed in python, so filter here rather than in SQL
            if agent.status != AGENT_STATUS_ONLINE:
                continue
            self.stdout.write(
                self.style.SUCCESS(f"{agent.hostname} - v{agent.version}")
            )
|
||||
@@ -1,26 +0,0 @@
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
from packaging import version as pyver
|
||||
|
||||
from agents.models import Agent
|
||||
from agents.tasks import send_agent_update_task
|
||||
from core.utils import get_core_settings, token_is_valid
|
||||
from tacticalrmm.constants import AGENT_DEFER
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Queue the background task that auto-updates outdated agents, unless
    agent auto-update is disabled in core settings."""

    help = "Triggers an agent update task to run"

    def handle(self, *args, **kwargs):
        core = get_core_settings()
        if not core.agent_auto_update:
            return

        latest = pyver.parse(settings.LATEST_AGENT_VER)
        outdated = Agent.objects.defer(*AGENT_DEFER).exclude(
            version=settings.LATEST_AGENT_VER
        )
        # only downgrade-safe candidates: strictly older than the latest release
        agent_ids: list[str] = [
            agent.agent_id for agent in outdated if pyver.parse(agent.version) < latest
        ]
        token, _ = token_is_valid()
        send_agent_update_task.delay(agent_ids=agent_ids, token=token, force=False)
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user