Compare commits

177 Commits:

2216ee422e, c877c9b0fb, bd4549f389, 5e20a5cd71, f82b589d03, ec2663a152, 8867d12ec7, 2d7724383f, 31830dc67d, 455bf53ba6,
bc99434574, 6e9bb0c4f4, 00c5f1365a, f7d317328a, 3ccd705225, 9e439fffaa, 859dc170e7, 1932d8fad9, 0c814ae436, 89313d8a37,
2b85722222, 57e5b0188c, 2d7c830e70, ccaa1790a9, f6531d905e, 64a31879d3, 0c6a4b1ed2, 67801f39fe, 892a0d67bf, 9fc0b7d5cc,
22a614ef54, cd257b8e4d, fa1ee2ca14, 34ea1adde6, 41cf8abb1f, c0ffec1a4c, 65779b8eaf, c47bdb2d56, d47ae642e7, 39c4609cc6,
3ebba02a10, 4dc7a96e79, 5a49a29110, 1e2a56c5e9, da54e97217, 2eefedadb3, ff07c69e7d, ff2508382a, 3969208942, 62ec8c8f76,
fb54d4bb64, 5c74d1d021, 9011148adf, 8dddd2d896, 4942f262f1, 38179b9d38, eca8f32570, ba42c5e367, 71e78bd0c5, bc6faf817f,
42cdf70cb4, 715982e40a, 5aa15c51ec, 541e07fb65, 89d95d3ae1, 9970403964, 2803cee29b, ade64d6c0a, 1a9bb3e986, c44e9a7292,
dd8d39e698, 221418120e, 7f3daea648, 182c85a228, 8dd636b0eb, d1c3fc8493, b5603a5233, 28edc31d43, aa8b84a302, f21ae93197,
1eefc6fbf4, 09ebf2cea2, f4b7924e8f, dfbaa71132, c5d05c1205, 1432853b39, 1e43b55804, 83ba480863, f158ea25e9, c14ffd08a0,
6e1239340b, eae9c04429, 4a1f5558b8, b7ce5fdd3e, 8eb91c08aa, d7868e9e5a, 6cab6d69d8, 48375f3878, 72d55a010b, 501c04ac2b,
6a55ca20f3, fd7d776121, 0ecf8da27e, 2bff297f79, c7fa5167c4, d16a98c788, dd76bfa3c2, ef8aaee028, ab17006956, 8097c681ac,
a11616aace, 6278240526, 2e5868778a, 31257bd5cb, 4a202c5585, 7e48015a54, 1e03c628d5, 1f9a241b94, d82f0cd757, 72543789cb,
01ee524049, b5c28de03f, bff0527857, e37f6cfda7, 0da1950427, 09462692f5, 339ec07465, 8046a3ccae, 9ab915a08b, d0828744a2,
0798d098ae, dab7ddc2bb, 081a96e281, a7dd881d79, ba6756cd45, 55f33357ea, 58b42fac5c, ccf9636296, 60b4ab6a63, 6b46025261,
fb439787a4, a8e03c6138, 153351cc9f, 763877541a, 51ea2ea879, e413c0264a, fb34c099d5, 0d2b4af986, 2b3cec06b3, a4194b14f9,
f073ddc906, 9b7ac58562, ff69bed394, 361cc08faa, 198c485e9a, 84ad1c352d, 327eb4b39b, 9a5f01813b, 09c535f159, 129f68e194,
24f6f9b063, 93c06eaba0, 159ecd3e4f, 20befd1ca2, 4aec4257da, 617738bb28, b63b2002a9, 4d27f2b594, 293f44c91a, c2a9685480,
cf0941cda9, 1f77acdd22, 192e418d08, 4992002a28, 29b04ee2f2, 081ad3c30b, 3287b4a23b
@@ -1,11 +1,11 @@
 # pulls community scripts from git repo
-FROM python:3.11.8-slim AS GET_SCRIPTS_STAGE
+FROM python:3.10-slim AS GET_SCRIPTS_STAGE
 
 RUN apt-get update && \
     apt-get install -y --no-install-recommends git && \
     git clone https://github.com/amidaware/community-scripts.git /community-scripts
 
-FROM python:3.11.8-slim
+FROM python:3.10-slim
 
 ENV TACTICAL_DIR /opt/tactical
 ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
@@ -18,7 +18,7 @@ ENV PYTHONUNBUFFERED=1
 EXPOSE 8000 8383 8005
 
 RUN apt-get update && \
-    apt-get install -y build-essential weasyprint
+    apt-get install -y build-essential
 
 RUN groupadd -g 1000 tactical && \
     useradd -u 1000 -g 1000 tactical

@@ -22,6 +21,21 @@ services:
         aliases:
           - tactical-backend
 
+  app-dev:
+    container_name: trmm-app-dev
+    image: node:16-alpine
+    restart: always
+    command: /bin/sh -c "npm install npm@latest -g && npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
+    working_dir: /workspace/web
+    volumes:
+      - ..:/workspace:cached
+    ports:
+      - "8080:${APP_PORT}"
+    networks:
+      dev:
+        aliases:
+          - tactical-frontend
+
   # nats
   nats-dev:
     container_name: trmm-nats-dev
@@ -216,7 +231,6 @@ services:
       - "443:4443"
     volumes:
       - tactical-data-dev:/opt/tactical
-      - ..:/workspace:cached
 
 volumes:
   tactical-data-dev: null
@@ -15,7 +15,10 @@ set -e
 : "${MESH_PASS:=meshcentralpass}"
 : "${MESH_HOST:=tactical-meshcentral}"
 : "${API_HOST:=tactical-backend}"
+: "${APP_HOST:=tactical-frontend}"
 : "${REDIS_HOST:=tactical-redis}"
+: "${HTTP_PROTOCOL:=http}"
+: "${APP_PORT:=8080}"
 : "${API_PORT:=8000}"
 
 : "${CERT_PRIV_PATH:=${TACTICAL_DIR}/certs/privkey.pem}"
@@ -50,10 +53,7 @@ function django_setup {
 
 DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
 
-BASE_DOMAIN=$(echo "import tldextract; no_fetch_extract = tldextract.TLDExtract(suffix_list_urls=()); extracted = no_fetch_extract('${API_HOST}'); print(f'{extracted.domain}.{extracted.suffix}')" | python)
-
-localvars="$(
-cat <<EOF
+localvars="$(cat << EOF
 SECRET_KEY = '${DJANGO_SEKRET}'
 
 DEBUG = True
@@ -67,17 +67,11 @@ KEY_FILE = '${CERT_PRIV_PATH}'
 
 SCRIPTS_DIR = '/community-scripts'
 
+ALLOWED_HOSTS = ['${API_HOST}', '*']
+
 ADMIN_URL = 'admin/'
 
-ALLOWED_HOSTS = ['${API_HOST}', '${APP_HOST}', '*']
+CORS_ORIGIN_ALLOW_ALL = True
 
-CORS_ORIGIN_WHITELIST = ['https://${APP_HOST}']
-
-SESSION_COOKIE_DOMAIN = '${BASE_DOMAIN}'
-CSRF_COOKIE_DOMAIN = '${BASE_DOMAIN}'
-CSRF_TRUSTED_ORIGINS = ['https://${API_HOST}', 'https://${APP_HOST}']
-
-HEADLESS_FRONTEND_URLS = {'socialaccount_login_error': 'https://${APP_HOST}/account/provider/callback'}
-
 DATABASES = {
     'default': {
@@ -87,17 +81,6 @@ DATABASES = {
         'PASSWORD': '${POSTGRES_PASS}',
         'HOST': '${POSTGRES_HOST}',
         'PORT': '${POSTGRES_PORT}',
-    },
-    'reporting': {
-        'ENGINE': 'django.db.backends.postgresql',
-        'NAME': '${POSTGRES_DB}',
-        'USER': 'reporting_user',
-        'PASSWORD': 'read_password',
-        'HOST': '${POSTGRES_HOST}',
-        'PORT': '${POSTGRES_PORT}',
-        'OPTIONS': {
-            'options': '-c default_transaction_read_only=on'
-        }
     }
 }
 
@@ -107,7 +90,6 @@ MESH_TOKEN_KEY = '${MESH_TOKEN}'
 REDIS_HOST = '${REDIS_HOST}'
 MESH_WS_URL = '${MESH_WS_URL}'
 ADMIN_ENABLED = True
-TRMM_INSECURE = True
 EOF
 )"
 
@@ -116,7 +98,6 @@ EOF
 # run migrations and init scripts
 "${VIRTUAL_ENV}"/bin/python manage.py pre_update_tasks
 "${VIRTUAL_ENV}"/bin/python manage.py migrate --no-input
-"${VIRTUAL_ENV}"/bin/python manage.py generate_json_schemas
 "${VIRTUAL_ENV}"/bin/python manage.py collectstatic --no-input
 "${VIRTUAL_ENV}"/bin/python manage.py initial_db_setup
 "${VIRTUAL_ENV}"/bin/python manage.py initial_mesh_setup
@@ -127,6 +108,7 @@ EOF
 "${VIRTUAL_ENV}"/bin/python manage.py create_installer_user
 "${VIRTUAL_ENV}"/bin/python manage.py post_update_tasks
 
+
 # create super user
 echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
 }
@@ -141,8 +123,6 @@ if [ "$1" = 'tactical-init-dev' ]; then
 mkdir -p /meshcentral-data
 mkdir -p ${TACTICAL_DIR}/tmp
 mkdir -p ${TACTICAL_DIR}/certs
-mkdir -p ${TACTICAL_DIR}/reporting
-mkdir -p ${TACTICAL_DIR}/reporting/assets
 mkdir -p /mongo/data/db
 mkdir -p /redis/data
 touch /meshcentral-data/.initialized && chown -R 1000:1000 /meshcentral-data
@@ -150,7 +130,6 @@ if [ "$1" = 'tactical-init-dev' ]; then
 touch ${TACTICAL_DIR}/certs/.initialized && chown -R 1000:1000 ${TACTICAL_DIR}/certs
 touch /mongo/data/db/.initialized && chown -R 1000:1000 /mongo/data/db
 touch /redis/data/.initialized && chown -R 1000:1000 /redis/data
-touch ${TACTICAL_DIR}/reporting && chown -R 1000:1000 ${TACTICAL_DIR}/reporting
 mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/exe
 mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/log
 touch ${TACTICAL_DIR}/api/tacticalrmm/private/log/django_debug.log
@@ -163,6 +142,16 @@ if [ "$1" = 'tactical-init-dev' ]; then
 
 django_setup
 
+# create .env file for frontend
+webenv="$(cat << EOF
+PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}"
+DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}"
+APP_URL = "https://${APP_HOST}"
+DOCKER_BUILD = 1
+EOF
+)"
+echo "${webenv}" | tee "${WORKSPACE_DIR}"/web/.env > /dev/null
+
 # chown everything to tactical user
 chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
 chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${TACTICAL_DIR}"
@@ -1,3 +1,41 @@
--r /workspace/api/tacticalrmm/requirements.txt
--r /workspace/api/tacticalrmm/requirements-dev.txt
--r /workspace/api/tacticalrmm/requirements-test.txt
+# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
+asgiref==3.5.0
+celery==5.2.6
+channels==3.0.4
+channels_redis==3.4.0
+daphne==3.0.2
+Django==4.0.4
+django-cors-headers==3.11.0
+django-ipware==4.0.2
+django-rest-knox==4.2.0
+djangorestframework==3.13.1
+future==0.18.2
+msgpack==1.0.3
+nats-py==2.1.0
+packaging==21.3
+psycopg2-binary==2.9.3
+pycryptodome==3.14.1
+pyotp==2.6.0
+pytz==2022.1
+qrcode==7.3.1
+redis==4.2.2
+requests==2.27.1
+twilio==7.8.1
+urllib3==1.26.9
+validators==0.18.2
+websockets==10.2
+drf_spectacular==0.22.0
+meshctrl==0.1.15
+hiredis==2.0.0
+
+# dev
+black==22.3.0
+django-extensions==3.1.5
+isort==5.10.1
+mypy==0.942
+types-pytz==2021.3.6
+model-bakery==1.5.0
+coverage==6.3.2
+django-silk==4.3.0
+django-stubs==1.10.1
+djangorestframework-stubs==1.5.0
.github/FUNDING.yml (vendored): 4 changes

@@ -1,9 +1,9 @@
 # These are supported funding model platforms
 
-github: amidaware
+github: wh1te909
 patreon: # Replace with a single Patreon username
 open_collective: # Replace with a single Open Collective username
-ko_fi: # tacticalrmm
+ko_fi: tacticalrmm
 tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
 community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
 liberapay: # Replace with a single Liberapay username
.github/ISSUE_TEMPLATE/bug_report.md (vendored): 3 changes

@@ -14,12 +14,11 @@ assignees: ''
 
 **Installation Method:**
 - [ ] Standard
-- [ ] Standard with `--insecure` flag at install
 - [ ] Docker
 
 **Agent Info (please complete the following information):**
 - Agent version (as shown in the 'Summary' tab of the agent from web UI):
-- Agent OS: [e.g. Win 10 v2004, Server 2016]
+- Agent OS: [e.g. Win 10 v2004, Server 2012 R2]
 
 **Describe the bug**
 A clear and concise description of what the bug is.
.github/workflows/ci-tests.yml (vendored): 82 changes

@@ -10,37 +10,23 @@ on:
 
 jobs:
   test:
-    runs-on: ubuntu-latest
-    name: Tests
-    strategy:
-      matrix:
-        python-version: ["3.11.8"]
+    runs-on: self-hosted
 
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v2
 
-      - uses: harmon758/postgresql-action@v1
-        with:
-          postgresql version: "15"
-          postgresql db: "pipeline"
-          postgresql user: "pipeline"
-          postgresql password: "pipeline123456"
-
-      - name: Setup Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python-version }}
-          check-latest: true
-
-      - name: Install redis
-        run: |
-          sudo apt update
-          sudo apt install -y redis
-          redis-server --version
-
-      - name: Install requirements
-        working-directory: api/tacticalrmm
+      - name: Setup virtual env and install requirements
         run: |
+          sudo -u postgres psql -c 'DROP DATABASE IF EXISTS pipeline'
+          sudo -u postgres psql -c 'DROP DATABASE IF EXISTS test_pipeline'
+          sudo -u postgres psql -c 'CREATE DATABASE pipeline'
+          sudo -u postgres psql -c "SET client_encoding = 'UTF8'" pipeline
+          pwd
+          rm -rf /actions-runner/_work/trmm-actions/trmm-actions/api/env
+          cd api
+          python3.10 -m venv env
+          source env/bin/activate
+          cd tacticalrmm
           python --version
           SETTINGS_FILE="tacticalrmm/settings.py"
           SETUPTOOLS_VER=$(grep "^SETUPTOOLS_VER" "$SETTINGS_FILE" | awk -F'[= "]' '{print $5}')
@@ -49,34 +35,32 @@ jobs:
           pip install setuptools==${SETUPTOOLS_VER} wheel==${WHEEL_VER}
           pip install -r requirements.txt -r requirements-test.txt
 
-      - name: Codestyle black
-        working-directory: api
-        run: |
-          black --exclude migrations/ --check --diff tacticalrmm
-          if [ $? -ne 0 ]; then
-            exit 1
-          fi
-
-      - name: Lint with flake8
-        working-directory: api/tacticalrmm
-        run: |
-          flake8 --config .flake8 .
-          if [ $? -ne 0 ]; then
-            exit 1
-          fi
-
       - name: Run django tests
         env:
           GHACTIONS: "yes"
-        working-directory: api/tacticalrmm
         run: |
-          pytest
+          cd api/tacticalrmm
+          source ../env/bin/activate
+          rm -f .coverage coverage.lcov
+          coverage run --concurrency=multiprocessing manage.py test -v 2 --parallel
+          coverage combine
+          coverage lcov
           if [ $? -ne 0 ]; then
             exit 1
          fi
 
-      - uses: codecov/codecov-action@v3
+      - name: Codestyle black
+        run: |
+          cd api
+          source env/bin/activate
+          black --exclude migrations/ --check tacticalrmm
+          if [ $? -ne 0 ]; then
+            exit 1
+          fi
+
+      - name: Coveralls
+        uses: coverallsapp/github-action@master
         with:
-          directory: ./api/tacticalrmm
-          files: ./api/tacticalrmm/coverage.xml
-          verbose: true
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          path-to-lcov: ./api/tacticalrmm/coverage.lcov
+          base-path: ./api/tacticalrmm
.github/workflows/codeql-analysis.yml (vendored, new file): 70 changes

@@ -0,0 +1,70 @@
+# For most projects, this workflow file will not need changing; you simply need
+# to commit it to your repository.
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+#
+# ******** NOTE ********
+# We have attempted to detect the languages in your repository. Please check
+# the `language` matrix defined below to confirm you have the correct set of
+# supported CodeQL languages.
+#
+name: "CodeQL"
+
+on:
+  push:
+    branches: [ develop ]
+  pull_request:
+    # The branches below must be a subset of the branches above
+    branches: [ develop ]
+  schedule:
+    - cron: '19 14 * * 6'
+
+jobs:
+  analyze:
+    name: Analyze
+    runs-on: ubuntu-latest
+    permissions:
+      actions: read
+      contents: read
+      security-events: write
+
+    strategy:
+      fail-fast: false
+      matrix:
+        language: [ 'go', 'javascript', 'python' ]
+        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
+        # Learn more about CodeQL language support at https://git.io/codeql-language-support
+
+    steps:
+    - name: Checkout repository
+      uses: actions/checkout@v2
+
+    # Initializes the CodeQL tools for scanning.
+    - name: Initialize CodeQL
+      uses: github/codeql-action/init@v1
+      with:
+        languages: ${{ matrix.language }}
+        # If you wish to specify custom queries, you can do so here or in a config file.
+        # By default, queries listed here will override any specified in a config file.
+        # Prefix the list here with "+" to use these queries and those in the config file.
+        # queries: ./path/to/local/query, your-org/your-repo/queries@main
+
+    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+    # If this step fails, then you should remove it and run the build manually (see below)
+    - name: Autobuild
+      uses: github/codeql-action/autobuild@v1
+
+    # ℹ️ Command-line programs to run using the OS shell.
+    # 📚 https://git.io/JvXDl
+
+    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
+    #    and modify them (or add more) to build your code if your project
+    #    uses a compiled language
+
+    #- run: |
+    #   make bootstrap
+    #   make release
+
+    - name: Perform CodeQL Analysis
+      uses: github/codeql-action/analyze@v1
.github/workflows/devskim-analysis.yml (vendored, new file): 34 changes

@@ -0,0 +1,34 @@
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+
+name: DevSkim
+
+on:
+  push:
+    branches: [ develop ]
+  pull_request:
+    branches: [ develop ]
+  schedule:
+    - cron: '19 5 * * 0'
+
+jobs:
+  lint:
+    name: DevSkim
+    runs-on: ubuntu-20.04
+    permissions:
+      actions: read
+      contents: read
+      security-events: write
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      - name: Run DevSkim scanner
+        uses: microsoft/DevSkim-Action@v1
+
+      - name: Upload DevSkim scan results to GitHub Security tab
+        uses: github/codeql-action/upload-sarif@v1
+        with:
+          sarif_file: devskim-results.sarif
.github/workflows/docker-build-push.yml (vendored): 2 changes

@@ -9,7 +9,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out the repo
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
 
       - name: Get Github Tag
         id: prep
.gitignore (vendored): 6 changes

@@ -53,9 +53,3 @@ nats-api.conf
 ignore/
 coverage.lcov
 daphne.sock.lock
-.pytest_cache
-coverage.xml
-setup_dev.yml
-11env/
-query_schema.json
-gunicorn_config.py
.vscode/extensions.json (vendored): 23 changes

@@ -1,23 +0,0 @@
-{
-  "recommendations": [
-    // frontend
-    "dbaeumer.vscode-eslint",
-    "esbenp.prettier-vscode",
-    "editorconfig.editorconfig",
-    "vue.volar",
-    "wayou.vscode-todo-highlight",
-
-    // python
-    "matangover.mypy",
-    "ms-python.python",
-
-    // golang
-    "golang.go"
-  ],
-  "unwantedRecommendations": [
-    "octref.vetur",
-    "hookyqr.beautify",
-    "dbaeumer.jshint",
-    "ms-vscode.vscode-typescript-tslint-plugin"
-  ]
-}
.vscode/settings.json (vendored): 61 changes

@@ -1,21 +1,40 @@
 {
-  "python.defaultInterpreterPath": "api/env/bin/python",
+  "python.defaultInterpreterPath": "api/tacticalrmm/env/bin/python",
   "python.languageServer": "Pylance",
-  "python.analysis.extraPaths": ["api/tacticalrmm", "api/env"],
+  "python.analysis.extraPaths": [
+    "api/tacticalrmm",
+    "api/env",
+  ],
   "python.analysis.diagnosticSeverityOverrides": {
     "reportUnusedImport": "error",
     "reportDuplicateImport": "error",
-    "reportGeneralTypeIssues": "none",
-    "reportOptionalMemberAccess": "none",
+    "reportGeneralTypeIssues": "none"
   },
   "python.analysis.typeCheckingMode": "basic",
-  "editor.bracketPairColorization.enabled": true,
-  "editor.guides.bracketPairs": true,
+  "mypy.runUsingActiveInterpreter": true,
+  "python.linting.enabled": true,
+  "python.linting.mypyEnabled": true,
+  "python.linting.mypyArgs": [
+    "--ignore-missing-imports",
+    "--follow-imports=silent",
+    "--show-column-numbers",
+    "--strict"
+  ],
+  "python.formatting.provider": "black",
   "editor.formatOnSave": true,
-  "files.associations": {
-    "**/ansible/**/*.yml": "ansible",
-    "**/docker/**/docker-compose*.yml": "dockercompose"
+  "vetur.format.defaultFormatter.js": "prettier",
+  "vetur.format.defaultFormatterOptions": {
+    "prettier": {
+      "semi": true,
+      "printWidth": 120,
+      "tabWidth": 2,
+      "useTabs": false,
+      "arrowParens": "avoid",
+    }
   },
+  "vetur.format.options.tabSize": 2,
+  "vetur.format.options.useTabs": false,
+  "files.watcherExclude": {
   "files.watcherExclude": {
     "**/.git/objects/**": true,
     "**/.git/subtree-cache/**": true,
@@ -36,24 +55,32 @@
     "**/*.pyc": true,
     "**/*.zip": true
   },
+  },
   "go.useLanguageServer": true,
   "[go]": {
+    "editor.formatOnSave": true,
     "editor.codeActionsOnSave": {
-      "source.organizeImports": "never"
+      "source.organizeImports": false,
     },
-    "editor.snippetSuggestions": "none"
+    "editor.snippetSuggestions": "none",
   },
   "[go.mod]": {
+    "editor.formatOnSave": true,
     "editor.codeActionsOnSave": {
-      "source.organizeImports": "explicit"
-    }
+      "source.organizeImports": true,
+    },
   },
   "gopls": {
     "usePlaceholders": true,
     "completeUnimported": true,
-    "staticcheck": true
+    "staticcheck": true,
   },
-  "[python]": {
-    "editor.defaultFormatter": "ms-python.black-formatter"
-  }
+  "mypy.targets": [
+    "api/tacticalrmm"
+  ],
+  "python.linting.ignorePatterns": [
+    "**/site-packages/**/*.py",
+    ".vscode/*.py",
+    "**env/**"
+  ]
 }
README.md: 23 changes

@@ -1,14 +1,13 @@
 # Tactical RMM
 
 
-[](https://codecov.io/gh/amidaware/tacticalrmm)
+[](https://coveralls.io/github/amidaware/tacticalrmm?branch=develop)
 [](https://github.com/python/black)
-
 Tactical RMM is a remote monitoring & management tool, built with Django and Vue.\
 It uses an [agent](https://github.com/amidaware/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
 
-# [LIVE DEMO](https://demo.tacticalrmm.com/)
+# [LIVE DEMO](https://rmm.tacticalrmm.io/)
 
 Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.
 
 ### [Discord Chat](https://discord.gg/upGTkWp)
@@ -20,11 +19,11 @@ Demo database resets every hour. A lot of features are disabled for obvious reas
 - Teamviewer-like remote desktop control
 - Real-time remote shell
 - Remote file browser (download and upload files)
-- Remote command and script execution (batch, powershell, python, nushell and deno scripts)
+- Remote command and script execution (batch, powershell and python scripts)
 - Event log viewer
 - Services management
 - Windows patch management
-- Automated checks with email/SMS/Webhook alerting (cpu, disk, memory, services, scripts, event logs)
+- Automated checks with email/SMS alerting (cpu, disk, memory, services, scripts, event logs)
 - Automated task runner (run scripts on a schedule)
 - Remote software installation via chocolatey
 - Software and hardware inventory
@@ -34,19 +33,7 @@ Demo database resets every hour. A lot of features are disabled for obvious reas
 - Windows 7, 8.1, 10, 11, Server 2008R2, 2012R2, 2016, 2019, 2022
 
 ## Linux agent versions supported
-
-- Any distro with systemd which includes but is not limited to: Debian (10, 11), Ubuntu x86_64 (18.04, 20.04, 22.04), Synology 7, centos, freepbx and more!
+- Any distro with systemd
 
-## Mac agent versions supported
-
-- 64 bit Intel and Apple Silicon (M-Series)
-
-## Sponsorship Features
-
-- Mac and Linux Agents
-- Windows [Code Signed](https://docs.tacticalrmm.com/code_signing/) Agents
-- Fully Customizable [Reporting](https://docs.tacticalrmm.com/ee/reporting/reporting_overview/) Module
-- [Single Sign-On](https://docs.tacticalrmm.com/ee/sso/sso/) (SSO)
-
 ## Installation / Backup / Restore / Usage
 
@@ -2,7 +2,10 @@
 
 ## Supported Versions
 
-[Latest](https://github.com/amidaware/tacticalrmm/releases/latest) release
+| Version | Supported |
+| ------- | ------------------ |
+| 0.12.2 | :white_check_mark: |
+| < 0.12.2 | :x: |
 
 ## Reporting a Vulnerability
 
@@ -1,3 +0,0 @@
-### tacticalrmm ansible WIP
-
-ansible role to setup a Debian 11 VM for tacticalrmm local development
@@ -1,40 +0,0 @@
----
-user: "tactical"
-python_ver: "3.11.8"
-go_ver: "1.20.7"
-backend_repo: "https://github.com/amidaware/tacticalrmm.git"
-frontend_repo: "https://github.com/amidaware/tacticalrmm-web.git"
-scripts_repo: "https://github.com/amidaware/community-scripts.git"
-backend_dir: "/opt/trmm"
-frontend_dir: "/opt/trmm-web"
-scripts_dir: "/opt/trmm-community-scripts"
-trmm_dir: "{{ backend_dir }}/api/tacticalrmm/tacticalrmm"
-mesh_dir: "/opt/meshcentral"
-settings_file: "{{ trmm_dir }}/settings.py"
-local_settings_file: "{{ trmm_dir }}/local_settings.py"
-fullchain_dest: /etc/ssl/certs/fullchain.pem
-privkey_dest: /etc/ssl/certs/privkey.pem
-
-base_pkgs:
-  - build-essential
-  - curl
-  - wget
-  - dirmngr
-  - gnupg
-  - openssl
-  - gcc
-  - g++
-  - make
-  - ca-certificates
-  - git
-
-python_pkgs:
-  - zlib1g-dev
-  - libncurses5-dev
-  - libgdbm-dev
-  - libnss3-dev
-  - libssl-dev
-  - libreadline-dev
-  - libffi-dev
-  - libsqlite3-dev
-  - libbz2-dev
@@ -1,31 +0,0 @@
-worker_rlimit_nofile 1000000;
-user www-data;
-worker_processes auto;
-pid /run/nginx.pid;
-include /etc/nginx/modules-enabled/*.conf;
-
-events {
-    worker_connections 4096;
-}
-
-http {
-    sendfile on;
-    server_tokens off;
-    tcp_nopush on;
-    types_hash_max_size 2048;
-    server_names_hash_bucket_size 256;
-    include /etc/nginx/mime.types;
-    default_type application/octet-stream;
-    ssl_protocols TLSv1.2 TLSv1.3;
-    ssl_prefer_server_ciphers on;
-    ssl_ciphers EECDH+AESGCM:EDH+AESGCM;
-    ssl_ecdh_curve secp384r1;
-    ssl_stapling on;
-    ssl_stapling_verify on;
-    add_header X-Content-Type-Options nosniff;
-    access_log /var/log/nginx/access.log;
-    error_log /var/log/nginx/error.log;
-    gzip on;
-    include /etc/nginx/conf.d/*.conf;
-    include /etc/nginx/sites-enabled/*;
-}
@@ -1,20 +0,0 @@
-" This file loads the default vim options at the beginning and prevents
-" that they are being loaded again later. All other options that will be set,
-" are added, or overwrite the default settings. Add as many options as you
-" whish at the end of this file.
-
-" Load the defaults
-source $VIMRUNTIME/defaults.vim
-
-" Prevent the defaults from being loaded again later, if the user doesn't
-" have a local vimrc (~/.vimrc)
-let skip_defaults_vim = 1
-
-
-" Set more options (overwrites settings from /usr/share/vim/vim80/defaults.vim)
-" Add as many options as you whish
-
-" Set the mouse mode to 'r'
-if has('mouse')
-    set mouse=r
-endif
@@ -1,634 +0,0 @@
----
-- name: Append subdomains to hosts
-  tags: hosts
-  become: yes
-  ansible.builtin.lineinfile:
-    path: /etc/hosts
-    backrefs: yes
-    regexp: '^(127\.0\.1\.1 .*)$'
-    line: "\\1 {{ api }} {{ mesh }} {{ rmm }}"
-
-- name: set mouse mode for vim
-  tags: vim
-  become: yes
-  ansible.builtin.copy:
-    src: vimrc.local
-    dest: /etc/vim/vimrc.local
-    owner: "root"
-    group: "root"
-    mode: "0644"
-
-- name: set max_user_watches
-  tags: sysctl
-  become: yes
-  ansible.builtin.lineinfile:
-    path: /etc/sysctl.conf
-    line: fs.inotify.max_user_watches=524288
-
-- name: reload sysctl
-  tags: sysctl
-  become: yes
-  ansible.builtin.command:
-    cmd: sysctl -p
-
-- name: install base packages
-  tags: base
-  become: yes
-  ansible.builtin.apt:
-    pkg: "{{ item }}"
-    state: present
-    update_cache: yes
-  with_items:
-    - "{{ base_pkgs }}"
-
-- name: set arch fact
-  ansible.builtin.set_fact:
-    goarch: "{{ 'amd64' if ansible_architecture == 'x86_64' else 'arm64' }}"
-
-- name: download and install golang
-  tags: golang
-  become: yes
-  ansible.builtin.unarchive:
-    src: "https://go.dev/dl/go{{ go_ver }}.linux-{{ goarch }}.tar.gz"
-    dest: /usr/local
-    remote_src: yes
-
-- name: add golang to path
-  become: yes
-  tags: golang
-  ansible.builtin.copy:
-    dest: /etc/profile.d/golang.sh
-    content: "PATH=$PATH:/usr/local/go/bin"
-
-- name: install python prereqs
-  tags: python
-  become: yes
-  ansible.builtin.apt:
-    pkg: "{{ item }}"
-    state: present
-  with_items:
-    - "{{ python_pkgs }}"
-
-- name: get cpu core count
-  tags: python
-  ansible.builtin.command: nproc
-  register: numprocs
-
-- name: Create python tmpdir
-  tags: python
-  ansible.builtin.tempfile:
-    state: directory
-    suffix: python
-  register: python_tmp
-
-- name: download and extract python
-  tags: python
-  ansible.builtin.unarchive:
-    src: "https://www.python.org/ftp/python/{{ python_ver }}/Python-{{ python_ver }}.tgz"
-    dest: "{{ python_tmp.path }}"
-    remote_src: yes
-
-- name: compile python
-  tags: python
-  ansible.builtin.shell:
-    chdir: "{{ python_tmp.path }}/Python-{{ python_ver }}"
-    cmd: |
-      ./configure --enable-optimizations
-      make -j {{ numprocs.stdout }}
-
-- name: alt install python
-  tags: python
-  become: yes
-  ansible.builtin.shell:
-    chdir: "{{ python_tmp.path }}/Python-{{ python_ver }}"
-    cmd: |
-      make altinstall
-
-- name: install redis
-  tags: redis
-  become: yes
-  ansible.builtin.apt:
-    pkg: redis
-    state: present
-
-- name: create postgres repo
-  tags: postgres
-  become: yes
-  ansible.builtin.copy:
-    content: "deb http://apt.postgresql.org/pub/repos/apt {{ ansible_distribution_release }}-pgdg main"
-    dest: /etc/apt/sources.list.d/pgdg.list
-    owner: root
-    group: root
-    mode: "0644"
-
-- name: import postgres repo signing key
-  tags: postgres
-  become: yes
-  ansible.builtin.apt_key:
-    url: https://www.postgresql.org/media/keys/ACCC4CF8.asc
-    state: present
-
-- name: install postgresql
-  tags: postgres
-  become: yes
-  ansible.builtin.apt:
-    pkg: postgresql-15
-    state: present
-    update_cache: yes
-
-- name: ensure postgres enabled and started
-  tags: postgres
-  become: yes
-  ansible.builtin.service:
-    name: postgresql
-    enabled: yes
-    state: started
-
-- name: setup trmm database
-  tags: postgres
-  become: yes
-  become_user: postgres
-  ansible.builtin.shell:
-    cmd: |
-      psql -c "CREATE DATABASE tacticalrmm"
-      psql -c "CREATE USER {{ db_user }} WITH PASSWORD '{{ db_passwd }}'"
-      psql -c "ALTER ROLE {{ db_user }} SET client_encoding TO 'utf8'"
-      psql -c "ALTER ROLE {{ db_user }} SET default_transaction_isolation TO 'read committed'"
-      psql -c "ALTER ROLE {{ db_user }} SET timezone TO 'UTC'"
-      psql -c "ALTER ROLE {{ db_user }} CREATEDB"
-      psql -c "GRANT ALL PRIVILEGES ON DATABASE tacticalrmm TO {{ db_user }}"
-      psql -c "ALTER DATABASE tacticalrmm OWNER TO {{ db_user }}"
-      psql -c "GRANT USAGE, CREATE ON SCHEMA PUBLIC TO {{ db_user }}"
-
-- name: setup mesh database
-  tags: postgres
-  become: yes
-  become_user: postgres
-  ansible.builtin.shell:
-    cmd: |
-      psql -c "CREATE DATABASE meshcentral"
-      psql -c "CREATE USER {{ mesh_db_user }} WITH PASSWORD '{{ mesh_db_passwd }}'"
-      psql -c "ALTER ROLE {{ mesh_db_user }} SET client_encoding TO 'utf8'"
-      psql -c "ALTER ROLE {{ mesh_db_user }} SET default_transaction_isolation TO 'read committed'"
-      psql -c "ALTER ROLE {{ mesh_db_user }} SET timezone TO 'UTC'"
-      psql -c "GRANT ALL PRIVILEGES ON DATABASE meshcentral TO {{ mesh_db_user }}"
-      psql -c "ALTER DATABASE meshcentral OWNER TO {{ mesh_db_user }}"
-      psql -c "GRANT USAGE, CREATE ON SCHEMA PUBLIC TO {{ mesh_db_user }}"
-
-- name: create repo dirs
-  become: yes
-  tags: git
-  ansible.builtin.file:
-    path: "{{ item }}"
-    state: directory
-    owner: "{{ user }}"
-    group: "{{ user }}"
-    mode: "0755"
-  with_items:
-    - "{{ backend_dir }}"
-    - "{{ frontend_dir }}"
-    - "{{ scripts_dir }}"
-
-- name: git clone repos
-  tags: git
-  ansible.builtin.git:
-    repo: "{{ item.repo }}"
-    dest: "{{ item.dest }}"
-    version: "{{ item.version }}"
-  with_items:
-    - {
-        repo: "{{ backend_repo }}",
-        dest: "{{ backend_dir }}",
-        version: develop,
-      }
-    - {
-        repo: "{{ frontend_repo }}",
-        dest: "{{ frontend_dir }}",
-        version: develop,
-      }
-    - { repo: "{{ scripts_repo }}", dest: "{{ scripts_dir }}", version: main }
-
-- name: get nats_server_ver
-  tags: nats
-  ansible.builtin.shell: grep "^NATS_SERVER_VER" {{ settings_file }} | awk -F'[= "]' '{print $5}'
-  register: nats_server_ver
-
-- name: Create nats tmpdir
-  tags: nats
-  ansible.builtin.tempfile:
-    state: directory
-    suffix: nats
-  register: nats_tmp
-
-- name: download and extract nats
-  tags: nats
-  ansible.builtin.unarchive:
-    src: "https://github.com/nats-io/nats-server/releases/download/v{{ nats_server_ver.stdout }}/nats-server-v{{ nats_server_ver.stdout }}-linux-{{ goarch }}.tar.gz"
-    dest: "{{ nats_tmp.path }}"
-    remote_src: yes
-
-- name: install nats
-  tags: nats
-  become: yes
-  ansible.builtin.copy:
-    remote_src: yes
-    src: "{{ nats_tmp.path }}/nats-server-v{{ nats_server_ver.stdout }}-linux-{{ goarch }}/nats-server"
-    dest: /usr/local/bin/nats-server
-    owner: "{{ user }}"
-    group: "{{ user }}"
-    mode: "0755"
-
-- name: Create nodejs tmpdir
-  tags: nodejs
-  ansible.builtin.tempfile:
-    state: directory
-    suffix: nodejs
-  register: nodejs_tmp
-
-- name: download nodejs setup
-  tags: nodejs
-  ansible.builtin.get_url:
-    url: https://deb.nodesource.com/setup_18.x
-    dest: "{{ nodejs_tmp.path }}/setup_node.sh"
-    mode: "0755"
-
-- name: run node setup script
-  tags: nodejs
-  become: yes
-  ansible.builtin.command:
-    cmd: "{{ nodejs_tmp.path }}/setup_node.sh"
-
-- name: install nodejs
-  tags: nodejs
-  become: yes
-  ansible.builtin.apt:
-    pkg: nodejs
-    state: present
-    update_cache: yes
-
-- name: update npm
-  tags: nodejs
-  become: yes
-  ansible.builtin.shell:
-    cmd: npm install -g npm
-
-- name: install quasar cli
-  tags: quasar
-  become: yes
-  ansible.builtin.shell:
-    cmd: npm install -g @quasar/cli
-
-- name: install frontend
-  tags: quasar
-  ansible.builtin.shell:
-    chdir: "{{ frontend_dir }}"
-    cmd: npm install
-
-- name: add quasar env
-  tags: quasar
-  ansible.builtin.template:
-    src: quasar.env.j2
-    dest: "{{ frontend_dir }}/.env"
-    owner: "{{ user }}"
-    group: "{{ user }}"
-    mode: "0644"
-
-- name: remove tempdirs
-  tags: cleanup
-  become: yes
-  ignore_errors: yes
-  ansible.builtin.file:
-    path: "{{ item }}"
-    state: absent
-  with_items:
-    - "{{ nats_tmp.path }}"
-    - "{{ python_tmp.path }}"
-    - "{{ nodejs_tmp.path }}"
-
- name: deploy fullchain
|
|
||||||
tags: certs
|
|
||||||
become: yes
|
|
||||||
ansible.builtin.copy:
|
|
||||||
src: "{{ fullchain_src }}"
|
|
||||||
dest: "{{ fullchain_dest }}"
|
|
||||||
owner: "{{ user }}"
|
|
||||||
group: "{{ user }}"
|
|
||||||
mode: "0440"
|
|
||||||
|
|
||||||
- name: deploy privkey
|
|
||||||
tags: certs
|
|
||||||
become: yes
|
|
||||||
ansible.builtin.copy:
|
|
||||||
src: "{{ privkey_src }}"
|
|
||||||
dest: "{{ privkey_dest }}"
|
|
||||||
owner: "{{ user }}"
|
|
||||||
group: "{{ user }}"
|
|
||||||
mode: "0440"
|
|
||||||
|
|
||||||
- name: import nginx signing key
|
|
||||||
tags: nginx
|
|
||||||
become: yes
|
|
||||||
ansible.builtin.apt_key:
|
|
||||||
url: https://nginx.org/keys/nginx_signing.key
|
|
||||||
state: present
|
|
||||||
|
|
||||||
- name: add nginx repo
|
|
||||||
tags: nginx
|
|
||||||
become: yes
|
|
||||||
ansible.builtin.template:
|
|
||||||
src: nginx.repo.j2
|
|
||||||
dest: /etc/apt/sources.list.d/nginx.list
|
|
||||||
owner: "root"
|
|
||||||
group: "root"
|
|
||||||
mode: "0644"
|
|
||||||
|
|
||||||
- name: install nginx
|
|
||||||
tags: nginx
|
|
||||||
become: yes
|
|
||||||
ansible.builtin.apt:
|
|
||||||
pkg: nginx
|
|
||||||
state: present
|
|
||||||
update_cache: yes
|
|
||||||
|
|
||||||
- name: set nginx default conf
|
|
||||||
tags: nginx
|
|
||||||
become: yes
|
|
||||||
ansible.builtin.copy:
|
|
||||||
src: nginx-default.conf
|
|
||||||
dest: /etc/nginx/nginx.conf
|
|
||||||
owner: "root"
|
|
||||||
group: "root"
|
|
||||||
mode: "0644"
|
|
||||||
|
|
||||||
- name: create nginx dirs
|
|
||||||
become: yes
|
|
||||||
tags: nginx
|
|
||||||
ansible.builtin.file:
|
|
||||||
state: directory
|
|
||||||
path: "{{ item }}"
|
|
||||||
mode: "0755"
|
|
||||||
with_items:
|
|
||||||
- /etc/nginx/sites-available
|
|
||||||
- /etc/nginx/sites-enabled
|
|
||||||
|
|
||||||
- name: deploy nginx sites
|
|
||||||
become: yes
|
|
||||||
tags: nginx
|
|
||||||
ansible.builtin.template:
|
|
||||||
src: "{{ item.src }}"
|
|
||||||
dest: "{{ item.dest }}"
|
|
||||||
mode: "0644"
|
|
||||||
owner: root
|
|
||||||
group: root
|
|
||||||
with_items:
|
|
||||||
- { src: backend.nginx.j2, dest: /etc/nginx/sites-available/backend.conf }
|
|
||||||
- { src: mesh.nginx.j2, dest: /etc/nginx/sites-available/mesh.conf }
|
|
||||||
|
|
||||||
- name: enable nginx sites
|
|
||||||
become: yes
|
|
||||||
tags: nginx
|
|
||||||
ansible.builtin.file:
|
|
||||||
src: "{{ item.src }}"
|
|
||||||
dest: "{{ item.dest }}"
|
|
||||||
mode: "0644"
|
|
||||||
owner: root
|
|
||||||
group: root
|
|
||||||
state: link
|
|
||||||
with_items:
|
|
||||||
- {
|
|
||||||
src: /etc/nginx/sites-available/backend.conf,
|
|
||||||
dest: /etc/nginx/sites-enabled/backend.conf,
|
|
||||||
}
|
|
||||||
- {
|
|
||||||
src: /etc/nginx/sites-available/mesh.conf,
|
|
||||||
dest: /etc/nginx/sites-enabled/mesh.conf,
|
|
||||||
}
|
|
||||||
|
|
||||||
- name: ensure nginx enabled and restarted
|
|
||||||
tags: nginx
|
|
||||||
become: yes
|
|
||||||
ansible.builtin.service:
|
|
||||||
name: nginx
|
|
||||||
enabled: yes
|
|
||||||
state: restarted
|
|
||||||
|
|
||||||
- name: set natsapi fact
|
|
||||||
ansible.builtin.set_fact:
|
|
||||||
natsapi: "{{ 'nats-api' if ansible_architecture == 'x86_64' else 'nats-api-arm64' }}"
|
|
||||||
|
|
||||||
- name: copy nats-api bin
|
|
||||||
tags: nats-api
|
|
||||||
become: yes
|
|
||||||
ansible.builtin.copy:
|
|
||||||
remote_src: yes
|
|
||||||
src: "{{ backend_dir }}/natsapi/bin/{{ natsapi }}"
|
|
||||||
dest: /usr/local/bin/nats-api
|
|
||||||
owner: "{{ user }}"
|
|
||||||
group: "{{ user }}"
|
|
||||||
mode: "0755"
|
|
||||||
|
|
||||||
- name: get setuptools_ver
|
|
||||||
tags: pip
|
|
||||||
ansible.builtin.shell: grep "^SETUPTOOLS_VER" {{ settings_file }} | awk -F'[= "]' '{print $5}'
|
|
||||||
register: setuptools_ver
|
|
||||||
|
|
||||||
- name: get wheel_ver
|
|
||||||
tags: pip
|
|
||||||
ansible.builtin.shell: grep "^WHEEL_VER" {{ settings_file }} | awk -F'[= "]' '{print $5}'
|
|
||||||
register: wheel_ver
|
|
||||||
|
|
||||||
- name: setup virtual env
|
|
||||||
tags: pip
|
|
||||||
ansible.builtin.shell:
|
|
||||||
chdir: "{{ backend_dir }}/api"
|
|
||||||
cmd: python3.11 -m venv env
|
|
||||||
|
|
||||||
- name: update pip to latest
|
|
||||||
tags: pip
|
|
||||||
ansible.builtin.pip:
|
|
||||||
virtualenv: "{{ backend_dir }}/api/env"
|
|
||||||
name: pip
|
|
||||||
state: latest
|
|
||||||
|
|
||||||
- name: install setuptools and wheel
|
|
||||||
tags: pip
|
|
||||||
ansible.builtin.pip:
|
|
||||||
virtualenv: "{{ backend_dir }}/api/env"
|
|
||||||
name: "{{ item }}"
|
|
||||||
with_items:
|
|
||||||
- "setuptools=={{ setuptools_ver.stdout }}"
|
|
||||||
- "wheel=={{ wheel_ver.stdout }}"
|
|
||||||
|
|
||||||
- name: install python packages
|
|
||||||
tags: pip
|
|
||||||
ansible.builtin.pip:
|
|
||||||
virtualenv: "{{ backend_dir }}/api/env"
|
|
||||||
chdir: "{{ backend_dir }}/api/tacticalrmm"
|
|
||||||
requirements: "{{ item }}"
|
|
||||||
with_items:
|
|
||||||
- requirements.txt
|
|
||||||
- requirements-dev.txt
|
|
||||||
- requirements-test.txt
|
|
||||||
|
|
||||||
- name: deploy django local settings
|
|
||||||
tags: django
|
|
||||||
ansible.builtin.template:
|
|
    src: local_settings.j2
    dest: "{{ local_settings_file }}"
    mode: "0644"
    owner: "{{ user }}"
    group: "{{ user }}"

- name: setup django
  tags: django
  ansible.builtin.shell:
    chdir: "{{ backend_dir }}/api/tacticalrmm"
    cmd: |
      . ../env/bin/activate
      python manage.py migrate --no-input
      python manage.py collectstatic --no-input
      python manage.py create_natsapi_conf
      python manage.py load_chocos
      python manage.py load_community_scripts
      echo "from accounts.models import User; User.objects.create_superuser('{{ django_user }}', '{{ github_email }}', '{{ django_password }}') if not User.objects.filter(username='{{ django_user }}').exists() else 0;" | python manage.py shell
      python manage.py create_installer_user

- name: deploy services
  tags: services
  become: yes
  ansible.builtin.template:
    src: "{{ item.src }}"
    dest: "{{ item.dest }}"
    mode: "0644"
    owner: "root"
    group: "root"
  with_items:
    - { src: nats-api.systemd.j2, dest: /etc/systemd/system/nats-api.service }
    - { src: nats-server.systemd.j2, dest: /etc/systemd/system/nats.service }
    - { src: mesh.systemd.j2, dest: /etc/systemd/system/meshcentral.service }

- name: get mesh_ver
  tags: mesh
  ansible.builtin.shell: grep "^MESH_VER" {{ settings_file }} | awk -F'[= "]' '{print $5}'
  register: mesh_ver

- name: create meshcentral data directory
  tags: mesh
  become: yes
  ansible.builtin.file:
    path: "{{ mesh_dir }}/meshcentral-data"
    state: directory
    owner: "{{ user }}"
    group: "{{ user }}"
    mode: "0755"

- name: install meshcentral
  tags: mesh
  ansible.builtin.command:
    chdir: "{{ mesh_dir }}"
    cmd: "npm install meshcentral@{{ mesh_ver.stdout }}"

- name: deploy mesh config
  tags: mesh
  ansible.builtin.template:
    src: mesh.cfg.j2
    dest: "{{ mesh_dir }}/meshcentral-data/config.json"
    mode: "0644"
    owner: "{{ user }}"
    group: "{{ user }}"

- name: start meshcentral
  tags: mesh
  become: yes
  ansible.builtin.systemd:
    name: meshcentral.service
    state: started
    enabled: yes
    daemon_reload: yes

- name: wait for meshcentral to be ready
  tags: mesh
  uri:
    url: "https://{{ mesh }}"
    return_content: yes
    validate_certs: yes
    status_code: 200
  register: mesh_status
  until: mesh_status.status == 200
  retries: 20
  delay: 3

- name: get meshcentral login token key
  tags: mesh_key
  ansible.builtin.command:
    chdir: "{{ mesh_dir }}"
    cmd: node node_modules/meshcentral --logintokenkey
  register: mesh_token_key

- name: add mesh key to django settings file
  tags: mesh_key
  ansible.builtin.lineinfile:
    path: "{{ local_settings_file }}"
    line: 'MESH_TOKEN_KEY = "{{ mesh_token_key.stdout }}"'

- name: stop meshcentral service
  tags: mesh_user
  become: yes
  ansible.builtin.service:
    name: meshcentral.service
    state: stopped

- name: create mesh user
  tags: mesh_user
  ansible.builtin.shell:
    chdir: "{{ mesh_dir }}"
    cmd: |
      node node_modules/meshcentral --createaccount {{ mesh_user }} --pass {{ mesh_password }} --email {{ github_email }}
      node node_modules/meshcentral --adminaccount {{ mesh_user }}

- name: start meshcentral service
  tags: mesh_user
  become: yes
  ansible.builtin.service:
    name: meshcentral.service
    state: started

- name: wait for meshcentral to be ready
  tags: mesh_user
  uri:
    url: "https://{{ mesh }}"
    return_content: yes
    validate_certs: yes
    status_code: 200
  register: mesh_status
  until: mesh_status.status == 200
  retries: 20
  delay: 3

- name: create mesh device group
  tags: mesh_user
  ansible.builtin.shell:
    chdir: "{{ mesh_dir }}"
    cmd: |
      node node_modules/meshcentral/meshctrl.js --url wss://{{ mesh }}:443 --loginuser {{ mesh_user }} --loginpass {{ mesh_password }} AddDeviceGroup --name TacticalRMM

- name: finish up django
  tags: mesh_user
  ansible.builtin.shell:
    chdir: "{{ backend_dir }}/api/tacticalrmm"
    cmd: |
      . ../env/bin/activate
      python manage.py initial_db_setup
      python manage.py reload_nats

- name: restart services
  tags: services
  become: yes
  ansible.builtin.systemd:
    daemon_reload: yes
    enabled: yes
    state: restarted
    name: "{{ item }}.service"
  with_items:
    - nats
    - nats-api
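Unrolled, the one-liner that the "setup django" task pipes into python manage.py shell is easier to read. A minimal sketch of the same idempotent superuser bootstrap, with the templated variables replaced by placeholder constants (the User import path is the one used in the task above):

# Runs inside the Django shell/app context; values are placeholders for the
# {{ django_user }}, {{ github_email }} and {{ django_password }} variables.
from accounts.models import User

DJANGO_USER = "changeme"
GITHUB_EMAIL = "changeme@example.com"
DJANGO_PASSWORD = "changeme"

if not User.objects.filter(username=DJANGO_USER).exists():
    # only create the account when it is missing, so re-running the play
    # does not fail on a duplicate superuser
    User.objects.create_superuser(DJANGO_USER, GITHUB_EMAIL, DJANGO_PASSWORD)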
@@ -1,20 +0,0 @@
server {
    listen 443 ssl reuseport;
    listen [::]:443 ssl;
    server_name {{ api }};
    client_max_body_size 300M;
    ssl_certificate {{ fullchain_dest }};
    ssl_certificate_key {{ privkey_dest }};

    location ~ ^/natsws {
        proxy_pass http://127.0.0.1:9235;
        proxy_http_version 1.1;
        proxy_set_header Host $host;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header X-Forwarded-Host $host:$server_port;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}
@@ -1,20 +0,0 @@
SECRET_KEY = "{{ django_secret }}"
DEBUG = True
ALLOWED_HOSTS = ['{{ api }}']
ADMIN_URL = "admin/"
CORS_ORIGIN_ALLOW_ALL = True
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'tacticalrmm',
        'USER': '{{ db_user }}',
        'PASSWORD': '{{ db_passwd }}',
        'HOST': 'localhost',
        'PORT': '5432',
    }
}
ADMIN_ENABLED = True
CERT_FILE = "{{ fullchain_dest }}"
KEY_FILE = "{{ privkey_dest }}"
MESH_USERNAME = "{{ mesh_user }}"
MESH_SITE = "https://{{ mesh }}"
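The rendered local_settings file only takes effect if the project's base settings module pulls it in. A common pattern for that (an assumption here, not something shown in this diff) is an optional star-import at the end of settings.py:

# settings.py (sketch) -- assumes local_settings.py lives next to settings.py
# and is allowed to override anything defined above this point
try:
    from .local_settings import *  # noqa: F401,F403
except ImportError:
    # local_settings.py is optional; keep the defaults if it is absent
    pass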
@@ -1,37 +0,0 @@
{
  "settings": {
    "Cert": "{{ mesh }}",
    "WANonly": true,
    "Minify": 1,
    "Port": 4430,
    "AliasPort": 443,
    "RedirPort": 800,
    "AllowLoginToken": true,
    "AllowFraming": true,
    "AgentPing": 35,
    "AllowHighQualityDesktop": true,
    "TlsOffload": "127.0.0.1",
    "agentCoreDump": false,
    "Compression": true,
    "WsCompression": true,
    "AgentWsCompression": true,
    "MaxInvalidLogin": { "time": 5, "count": 5, "coolofftime": 30 },
    "postgres": {
      "user": "{{ mesh_db_user }}",
      "password": "{{ mesh_db_passwd }}",
      "port": "5432",
      "host": "localhost"
    }
  },
  "domains": {
    "": {
      "Title": "Tactical RMM Dev",
      "Title2": "Tactical RMM Dev",
      "NewAccounts": false,
      "CertUrl": "https://{{ mesh }}:443/",
      "GeoLocation": true,
      "CookieIpCheck": false,
      "mstsc": true
    }
  }
}
@@ -1,22 +0,0 @@
server {
    listen 443 ssl;
    listen [::]:443 ssl;
    proxy_send_timeout 330s;
    proxy_read_timeout 330s;
    server_name {{ mesh }};
    ssl_certificate {{ fullchain_dest }};
    ssl_certificate_key {{ privkey_dest }};

    ssl_session_cache shared:WEBSSL:10m;

    location / {
        proxy_pass http://127.0.0.1:4430/;
        proxy_http_version 1.1;
        proxy_set_header Host $host;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header X-Forwarded-Host $host:$server_port;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}
@@ -1,17 +0,0 @@
[Unit]
Description=MeshCentral Server
After=network.target postgresql.service nginx.service

[Service]
Type=simple
LimitNOFILE=1000000
ExecStart=/usr/bin/node node_modules/meshcentral
Environment=NODE_ENV=production
WorkingDirectory={{ mesh_dir }}
User={{ user }}
Group={{ user }}
Restart=always
RestartSec=10s

[Install]
WantedBy=multi-user.target
@@ -1,14 +0,0 @@
[Unit]
Description=TacticalRMM Nats Api
After=nats.service

[Service]
Type=simple
ExecStart=/usr/local/bin/nats-api -config {{ backend_dir }}/api/tacticalrmm/nats-api.conf
User={{ user }}
Group={{ user }}
Restart=always
RestartSec=5s

[Install]
WantedBy=multi-user.target
@@ -1,18 +0,0 @@
[Unit]
Description=NATS Server
After=network.target

[Service]
PrivateTmp=true
Type=simple
ExecStart=/usr/local/bin/nats-server -c {{ backend_dir }}/api/tacticalrmm/nats-rmm.conf
ExecReload=/usr/bin/kill -s HUP $MAINPID
ExecStop=/usr/bin/kill -s SIGINT $MAINPID
User={{ user }}
Group={{ user }}
Restart=always
RestartSec=5s
LimitNOFILE=1000000

[Install]
WantedBy=multi-user.target
@@ -1,2 +0,0 @@
deb https://nginx.org/packages/debian/ {{ ansible_distribution_release }} nginx
deb-src https://nginx.org/packages/debian/ {{ ansible_distribution_release }} nginx
@@ -1,4 +0,0 @@
DEV_URL = "http://{{ api }}:8000"
DEV_HOST = "0.0.0.0"
DEV_PORT = "8080"
USE_HTTPS = false
@@ -1,22 +0,0 @@
---
- hosts: "{{ target }}"
  vars:
    ansible_user: tactical
    fullchain_src: /path/to/fullchain.pem
    privkey_src: /path/to/privkey.pem
    api: "api.example.com"
    rmm: "rmm.example.com"
    mesh: "mesh.example.com"
    github_username: "changeme"
    github_email: "changeme@example.com"
    mesh_user: "changeme"
    mesh_password: "changeme"
    db_user: "changeme"
    db_passwd: "changeme"
    mesh_db_user: "changeme"
    mesh_db_passwd: "changeme"
    django_secret: "changeme"
    django_user: "changeme"
    django_password: "changeme"
  roles:
    - trmm_dev
@@ -1,15 +1,26 @@
 [run]
-include = *.py
-omit =
-    tacticalrmm/asgi.py
-    tacticalrmm/wsgi.py
-    manage.py
-    */__pycache__/*
-    */env/*
-    */baker_recipes.py
-    /usr/local/lib/*
-    **/migrations/*
-    **/test*.py
+source = .
 
 [report]
 show_missing = True
+include = *.py
+omit =
+    */__pycache__/*
+    */env/*
+    */management/*
+    */migrations/*
+    */static/*
+    manage.py
+    */local_settings.py
+    */apps.py
+    */admin.py
+    */celery.py
+    */wsgi.py
+    */settings.py
+    */baker_recipes.py
+    */urls.py
+    */tests.py
+    */test.py
+    checks/utils.py
+    */asgi.py
+    */demo_views.py
@@ -1,12 +0,0 @@
[flake8]
ignore = E501,W503,E722,E203
exclude =
    .mypy*
    .pytest*
    .git
    demo_data.py
    manage.py
    */__pycache__/*
    */env/*
    /usr/local/lib/*
    **/migrations/*
@@ -1,24 +1,22 @@
 import uuid
 
-from django.core.management.base import BaseCommand
-
 from accounts.models import User
-from tacticalrmm.helpers import make_random_password
+from django.core.management.base import BaseCommand
 
 
 class Command(BaseCommand):
     help = "Creates the installer user"
 
-    def handle(self, *args, **kwargs):  # type: ignore
+    def handle(self, *args, **kwargs):
         self.stdout.write("Checking if installer user has been created...")
         if User.objects.filter(is_installer_user=True).exists():
             self.stdout.write("Installer user already exists")
             return
 
-        User.objects.create_user(
+        User.objects.create_user(  # type: ignore
             username=uuid.uuid4().hex,
             is_installer_user=True,
-            password=make_random_password(len=60),
+            password=User.objects.make_random_password(60),  # type: ignore
             block_dashboard_login=True,
         )
         self.stdout.write("Installer user has been created")
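The left side of the hunk above imports make_random_password from tacticalrmm.helpers; its implementation is not part of this diff. A hypothetical sketch of what such a helper could look like, built on the standard library's secrets module:

import secrets
import string

def make_random_password(*, len: int) -> str:
    # `len` mirrors the keyword used in the call above (it shadows the builtin)
    alphabet = string.ascii_letters + string.digits
    return "".join(secrets.choice(alphabet) for _ in range(len))

# usage, matching the management command:
# password=make_random_password(len=60)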
@@ -6,7 +6,7 @@ from knox.models import AuthToken
 class Command(BaseCommand):
     help = "Deletes all knox web tokens"
 
-    def handle(self, *args, **kwargs):  # type: ignore
+    def handle(self, *args, **kwargs):
         # only delete web tokens, not any generated by the installer or deployments
         dont_delete = djangotime.now() + djangotime.timedelta(hours=23)
         tokens = AuthToken.objects.exclude(deploytokens__isnull=False).filter(
@@ -1,9 +1,8 @@
 import subprocess
 
 import pyotp
-from django.core.management.base import BaseCommand
-
 from accounts.models import User
+from django.core.management.base import BaseCommand
 
 
 class Command(BaseCommand):
@@ -1,11 +1,9 @@
+import os
 import subprocess
 
 import pyotp
-from django.conf import settings
-from django.core.management.base import BaseCommand
-
 from accounts.models import User
-from tacticalrmm.util_settings import get_webdomain
+from django.core.management.base import BaseCommand
 
 
 class Command(BaseCommand):
@@ -22,13 +20,28 @@
             self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
             return
 
+        domain = "Tactical RMM"
+        nginx = "/etc/nginx/sites-available/frontend.conf"
+        found = None
+        if os.path.exists(nginx):
+            try:
+                with open(nginx, "r") as f:
+                    for line in f:
+                        if "server_name" in line:
+                            found = line
+                            break
+
+                if found:
+                    rep = found.replace("server_name", "").replace(";", "")
+                    domain = "".join(rep.split())
+            except:
+                pass
+
         code = pyotp.random_base32()
         user.totp_key = code
         user.save(update_fields=["totp_key"])
 
-        url = pyotp.totp.TOTP(code).provisioning_uri(
-            username, issuer_name=get_webdomain(settings.CORS_ORIGIN_WHITELIST[0])
-        )
+        url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain)
         subprocess.run(f'qr "{url}"', shell=True)
         self.stdout.write(
             self.style.WARNING("Scan the barcode above with your authenticator app")
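For context, the pyotp calls used by this command (and by the TOTP setup and login code later in the diff) behave roughly as follows; this is a standalone illustration, not project code:

import pyotp

key = pyotp.random_base32()                   # what gets stored in user.totp_key
totp = pyotp.TOTP(key)
uri = totp.provisioning_uri("bob", issuer_name="Tactical RMM")  # encoded into the QR code
print(uri)
code = totp.now()                             # current 6-digit code from the authenticator
print(totp.verify(code))                      # True; this is what the login view checks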
@@ -1,8 +1,5 @@
-from getpass import getpass
-
-from django.core.management.base import BaseCommand
-
 from accounts.models import User
+from django.core.management.base import BaseCommand
 
 
 class Command(BaseCommand):
@@ -19,13 +16,7 @@
             self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
             return
 
-        pass1, pass2 = "foo", "bar"
-        while pass1 != pass2:
-            pass1 = getpass()
-            pass2 = getpass(prompt="Confirm Password:")
-            if pass1 != pass2:
-                self.stdout.write(self.style.ERROR("Passwords don't match"))
-
-        user.set_password(pass1)
+        passwd = input("Enter new password: ")
+        user.set_password(passwd)
         user.save()
         self.stdout.write(self.style.SUCCESS(f"Password for {username} was reset!"))
@@ -1,25 +0,0 @@
# Generated by Django 4.2.1 on 2023-05-17 07:11

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("accounts", "0031_user_date_format"),
    ]

    operations = [
        migrations.AlterField(
            model_name="user",
            name="default_agent_tbl_tab",
            field=models.CharField(
                choices=[
                    ("server", "Servers"),
                    ("workstation", "Workstations"),
                    ("mixed", "Mixed"),
                ],
                default="mixed",
                max_length=50,
            ),
        ),
    ]
@@ -1,32 +0,0 @@
# Generated by Django 4.2.1 on 2023-05-23 04:54

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("accounts", "0032_alter_user_default_agent_tbl_tab"),
    ]

    operations = [
        migrations.AddField(
            model_name="user",
            name="dash_info_color",
            field=models.CharField(default="info", max_length=255),
        ),
        migrations.AddField(
            model_name="user",
            name="dash_negative_color",
            field=models.CharField(default="negative", max_length=255),
        ),
        migrations.AddField(
            model_name="user",
            name="dash_positive_color",
            field=models.CharField(default="positive", max_length=255),
        ),
        migrations.AddField(
            model_name="user",
            name="dash_warning_color",
            field=models.CharField(default="warning", max_length=255),
        ),
    ]
@@ -1,17 +0,0 @@
# Generated by Django 4.1.9 on 2023-05-26 23:59

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("accounts", "0033_user_dash_info_color_user_dash_negative_color_and_more"),
    ]

    operations = [
        migrations.AddField(
            model_name="role",
            name="can_send_wol",
            field=models.BooleanField(default=False),
        ),
    ]
@@ -1,22 +0,0 @@
# Generated by Django 4.2.5 on 2023-10-08 22:24

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("accounts", "0034_role_can_send_wol"),
    ]

    operations = [
        migrations.AddField(
            model_name="role",
            name="can_manage_reports",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="role",
            name="can_view_reports",
            field=models.BooleanField(default=False),
        ),
    ]
@@ -1,16 +0,0 @@
# Generated by Django 4.2.7 on 2023-11-09 19:57

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("accounts", "0035_role_can_manage_reports_role_can_view_reports"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="role",
            name="can_ping_agents",
        ),
    ]
@@ -1,23 +0,0 @@
# Generated by Django 4.2.13 on 2024-06-28 20:21

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("accounts", "0036_remove_role_can_ping_agents"),
    ]

    operations = [
        migrations.AddField(
            model_name="role",
            name="can_run_server_scripts",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="role",
            name="can_use_webterm",
            field=models.BooleanField(default=False),
        ),
    ]
@@ -1,23 +0,0 @@
# Generated by Django 4.2.16 on 2024-10-06 05:44

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("accounts", "0037_role_can_run_server_scripts_role_can_use_webterm"),
    ]

    operations = [
        migrations.AddField(
            model_name="role",
            name="can_edit_global_keystore",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="role",
            name="can_view_global_keystore",
            field=models.BooleanField(default=False),
        ),
    ]
@@ -1,18 +1,30 @@
-from typing import Optional
-
-from allauth.socialaccount.models import SocialAccount
 from django.contrib.auth.models import AbstractUser
-from django.core.cache import cache
 from django.db import models
 from django.db.models.fields import CharField, DateTimeField
-
+from django.core.cache import cache
 from logs.models import BaseAuditModel
-from tacticalrmm.constants import (
-    ROLE_CACHE_PREFIX,
-    AgentDblClick,
-    AgentTableTabs,
-    ClientTreeSort,
-)
+from typing import Optional
+
+from tacticalrmm.constants import ROLE_CACHE_PREFIX
+
+AGENT_DBLCLICK_CHOICES = [
+    ("editagent", "Edit Agent"),
+    ("takecontrol", "Take Control"),
+    ("remotebg", "Remote Background"),
+    ("urlaction", "URL Action"),
+]
+
+AGENT_TBL_TAB_CHOICES = [
+    ("server", "Servers"),
+    ("workstation", "Workstations"),
+    ("mixed", "Mixed"),
+]
+
+CLIENT_TREE_SORT_CHOICES = [
+    ("alphafail", "Move failing clients to the top"),
+    ("alpha", "Sort alphabetically"),
+]
 
 
 class User(AbstractUser, BaseAuditModel):
@@ -21,8 +33,8 @@ class User(AbstractUser, BaseAuditModel):
     totp_key = models.CharField(max_length=50, null=True, blank=True)
     dark_mode = models.BooleanField(default=True)
     show_community_scripts = models.BooleanField(default=True)
-    agent_dblclick_action: "AgentDblClick" = models.CharField(
-        max_length=50, choices=AgentDblClick.choices, default=AgentDblClick.EDIT_AGENT
+    agent_dblclick_action = models.CharField(
+        max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent"
     )
     url_action = models.ForeignKey(
         "core.URLAction",
@@ -32,18 +44,14 @@ class User(AbstractUser, BaseAuditModel):
         on_delete=models.SET_NULL,
     )
     default_agent_tbl_tab = models.CharField(
-        max_length=50, choices=AgentTableTabs.choices, default=AgentTableTabs.MIXED
+        max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server"
     )
     agents_per_page = models.PositiveIntegerField(default=50)  # not currently used
     client_tree_sort = models.CharField(
-        max_length=50, choices=ClientTreeSort.choices, default=ClientTreeSort.ALPHA_FAIL
+        max_length=50, choices=CLIENT_TREE_SORT_CHOICES, default="alphafail"
    )
     client_tree_splitter = models.PositiveIntegerField(default=11)
     loading_bar_color = models.CharField(max_length=255, default="red")
-    dash_info_color = models.CharField(max_length=255, default="info")
-    dash_positive_color = models.CharField(max_length=255, default="positive")
-    dash_negative_color = models.CharField(max_length=255, default="negative")
-    dash_warning_color = models.CharField(max_length=255, default="warning")
     clear_search_when_switching = models.BooleanField(default=True)
     date_format = models.CharField(max_length=30, blank=True, null=True)
     is_installer_user = models.BooleanField(default=False)
@@ -65,19 +73,6 @@ class User(AbstractUser, BaseAuditModel):
         on_delete=models.SET_NULL,
     )
 
-    @property
-    def mesh_user_id(self):
-        return f"user//{self.mesh_username}"
-
-    @property
-    def mesh_username(self):
-        # lower() needed for mesh api
-        return f"{self.username.replace(' ', '').lower()}___{self.pk}"
-
-    @property
-    def is_sso_user(self):
-        return SocialAccount.objects.filter(user_id=self.pk).exists()
-
     @staticmethod
     def serialize(user):
         # serializes the task and returns json
@@ -109,6 +104,7 @@ class Role(BaseAuditModel):
 
     # agents
     can_list_agents = models.BooleanField(default=False)
+    can_ping_agents = models.BooleanField(default=False)
     can_use_mesh = models.BooleanField(default=False)
     can_uninstall_agents = models.BooleanField(default=False)
     can_update_agents = models.BooleanField(default=False)
@@ -122,7 +118,6 @@ class Role(BaseAuditModel):
     can_run_bulk = models.BooleanField(default=False)
     can_recover_agents = models.BooleanField(default=False)
     can_list_agent_history = models.BooleanField(default=False)
-    can_send_wol = models.BooleanField(default=False)
 
     # core
     can_list_notes = models.BooleanField(default=False)
@@ -134,10 +129,6 @@ class Role(BaseAuditModel):
     can_run_urlactions = models.BooleanField(default=False)
     can_view_customfields = models.BooleanField(default=False)
     can_manage_customfields = models.BooleanField(default=False)
-    can_run_server_scripts = models.BooleanField(default=False)
-    can_use_webterm = models.BooleanField(default=False)
-    can_view_global_keystore = models.BooleanField(default=False)
-    can_edit_global_keystore = models.BooleanField(default=False)
 
     # checks
     can_list_checks = models.BooleanField(default=False)
@@ -203,17 +194,14 @@ class Role(BaseAuditModel):
     can_list_api_keys = models.BooleanField(default=False)
     can_manage_api_keys = models.BooleanField(default=False)
 
-    # reporting
-    can_view_reports = models.BooleanField(default=False)
-    can_manage_reports = models.BooleanField(default=False)
-
     def __str__(self):
         return self.name
 
     def save(self, *args, **kwargs) -> None:
         # delete cache on save
         cache.delete(f"{ROLE_CACHE_PREFIX}{self.name}")
-        super().save(*args, **kwargs)
+        super(BaseAuditModel, self).save(*args, **kwargs)
 
     @staticmethod
     def serialize(role):
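The left-hand side of the models hunks references AgentDblClick, AgentTableTabs and ClientTreeSort from tacticalrmm.constants, whose definitions are outside this diff. A plausible sketch of one of them as a Django TextChoices enum, matching the values in the right-hand tuple lists:

from django.db import models

class AgentDblClick(models.TextChoices):
    # value, human-readable label -- mirrors the old AGENT_DBLCLICK_CHOICES list
    EDIT_AGENT = "editagent", "Edit Agent"
    TAKE_CONTROL = "takecontrol", "Take Control"
    REMOTE_BG = "remotebg", "Remote Background"
    URL_ACTION = "urlaction", "URL Action"

# CharField(choices=AgentDblClick.choices, default=AgentDblClick.EDIT_AGENT)
# then stores the same strings as choices=AGENT_DBLCLICK_CHOICES, default="editagent".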
@@ -1,17 +1,17 @@
 from rest_framework import permissions
 
 from tacticalrmm.permissions import _has_perm
-from tacticalrmm.utils import get_core_settings
 
 
 class AccountsPerms(permissions.BasePermission):
     def has_permission(self, r, view) -> bool:
         if r.method == "GET":
             return _has_perm(r, "can_list_accounts")
+        else:
 
             # allow users to reset their own password/2fa see issue #686
             base_path = "/accounts/users/"
-            paths = ("reset/", "reset_totp/")
+            paths = ["reset/", "reset_totp/"]
 
             if r.path in [base_path + i for i in paths]:
                 from accounts.models import User
@@ -31,7 +31,7 @@ class RolesPerms(permissions.BasePermission):
     def has_permission(self, r, view) -> bool:
         if r.method == "GET":
             return _has_perm(r, "can_list_roles")
-
+        else:
             return _has_perm(r, "can_manage_roles")
 
 
@@ -41,14 +41,3 @@ class APIKeyPerms(permissions.BasePermission):
             return _has_perm(r, "can_list_api_keys")
 
         return _has_perm(r, "can_manage_api_keys")
-
-
-class LocalUserPerms(permissions.BasePermission):
-    def has_permission(self, r, view) -> bool:
-        settings = get_core_settings()
-        return not settings.block_local_user_logon
-
-
-class SelfResetSSOPerms(permissions.BasePermission):
-    def has_permission(self, r, view) -> bool:
-        return not r.user.is_sso_user
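Both versions of these classes follow the same DRF pattern: a BasePermission whose has_permission gates the request, combined with IsAuthenticated in a view's permission_classes. A minimal self-contained sketch of that pattern (the class and field names below are illustrative, not taken from this diff):

from rest_framework.permissions import BasePermission, IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

class CanListWidgets(BasePermission):
    def has_permission(self, request, view) -> bool:
        # deny by default; allow only when the user's role grants the flag
        role = getattr(request.user, "role", None)
        return bool(role and getattr(role, "can_list_widgets", False))

class WidgetList(APIView):
    permission_classes = [IsAuthenticated, CanListWidgets]

    def get(self, request):
        return Response([])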
@@ -1,13 +1,10 @@
 import pyotp
-from django.conf import settings
 from rest_framework.serializers import (
     ModelSerializer,
     ReadOnlyField,
     SerializerMethodField,
 )
 
-from tacticalrmm.util_settings import get_webdomain
-
 from .models import APIKey, Role, User
 
 
@@ -23,10 +20,6 @@ class UserUISerializer(ModelSerializer):
             "client_tree_sort",
             "client_tree_splitter",
             "loading_bar_color",
-            "dash_info_color",
-            "dash_positive_color",
-            "dash_negative_color",
-            "dash_warning_color",
             "clear_search_when_switching",
             "block_dashboard_login",
             "date_format",
@@ -52,6 +45,7 @@ class UserSerializer(ModelSerializer):
 
 
 class TOTPSetupSerializer(ModelSerializer):
+
     qr_url = SerializerMethodField()
 
     class Meta:
@@ -64,7 +58,7 @@ class TOTPSetupSerializer(ModelSerializer):
 
     def get_qr_url(self, obj):
         return pyotp.totp.TOTP(obj.totp_key).provisioning_uri(
-            obj.username, issuer_name=get_webdomain(settings.CORS_ORIGIN_WHITELIST[0])
+            obj.username, issuer_name="Tactical RMM"
         )
 
 
@@ -86,6 +80,7 @@ class RoleAuditSerializer(ModelSerializer):
 
 
 class APIKeySerializer(ModelSerializer):
+
     username = ReadOnlyField(source="user.username")
 
     class Meta:
@@ -1,30 +1,28 @@
 from unittest.mock import patch
 
-from django.test import override_settings
-from model_bakery import baker, seq
-
 from accounts.models import APIKey, User
 from accounts.serializers import APIKeySerializer
-from tacticalrmm.constants import AgentDblClick, AgentTableTabs, ClientTreeSort
+from django.test import override_settings
+from model_bakery import baker, seq
+
 from tacticalrmm.test import TacticalTestCase
 
 
 class TestAccounts(TacticalTestCase):
     def setUp(self):
-        self.setup_coresettings()
         self.setup_client()
         self.bob = User(username="bob")
         self.bob.set_password("hunter2")
         self.bob.save()
 
     def test_check_creds(self):
-        url = "/v2/checkcreds/"
+        url = "/checkcreds/"
 
         data = {"username": "bob", "password": "hunter2"}
         r = self.client.post(url, data, format="json")
         self.assertEqual(r.status_code, 200)
         self.assertIn("totp", r.data.keys())
-        self.assertEqual(r.data["totp"], False)
+        self.assertEqual(r.data["totp"], "totp not set")
 
         data = {"username": "bob", "password": "a3asdsa2314"}
         r = self.client.post(url, data, format="json")
@@ -41,7 +39,7 @@ class TestAccounts(TacticalTestCase):
         data = {"username": "bob", "password": "hunter2"}
         r = self.client.post(url, data, format="json")
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data["totp"], True)
+        self.assertEqual(r.data, "ok")
 
         # test user set to block dashboard logins
         self.bob.block_dashboard_login = True
@@ -51,7 +49,7 @@ class TestAccounts(TacticalTestCase):
 
     @patch("pyotp.TOTP.verify")
     def test_login_view(self, mock_verify):
-        url = "/v2/login/"
+        url = "/login/"
 
         mock_verify.return_value = True
         data = {"username": "bob", "password": "hunter2", "twofactor": "123456"}
@@ -71,17 +69,17 @@ class TestAccounts(TacticalTestCase):
         self.assertEqual(r.status_code, 400)
         self.assertIn("non_field_errors", r.data.keys())
 
-    # @override_settings(DEBUG=True)
-    # @patch("pyotp.TOTP.verify")
-    # def test_debug_login_view(self, mock_verify):
-    #     url = "/login/"
-    #     mock_verify.return_value = True
+    @override_settings(DEBUG=True)
+    @patch("pyotp.TOTP.verify")
+    def test_debug_login_view(self, mock_verify):
+        url = "/login/"
+        mock_verify.return_value = True
 
-    #     data = {"username": "bob", "password": "hunter2", "twofactor": "sekret"}
-    #     r = self.client.post(url, data, format="json")
-    #     self.assertEqual(r.status_code, 200)
-    #     self.assertIn("expiry", r.data.keys())
-    #     self.assertIn("token", r.data.keys())
+        data = {"username": "bob", "password": "hunter2", "twofactor": "sekret"}
+        r = self.client.post(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+        self.assertIn("expiry", r.data.keys())
+        self.assertIn("token", r.data.keys())
 
 
 class TestGetAddUsers(TacticalTestCase):
@@ -198,7 +196,7 @@ class GetUpdateDeleteUser(TacticalTestCase):
         r = self.client.delete(url)
         self.assertEqual(r.status_code, 200)
 
-        url = "/accounts/893452/users/"
+        url = f"/accounts/893452/users/"
         r = self.client.delete(url)
         self.assertEqual(r.status_code, 404)
 
@@ -285,9 +283,9 @@ class TestUserAction(TacticalTestCase):
         data = {
             "dark_mode": True,
             "show_community_scripts": True,
-            "agent_dblclick_action": AgentDblClick.EDIT_AGENT,
-            "default_agent_tbl_tab": AgentTableTabs.MIXED,
-            "client_tree_sort": ClientTreeSort.ALPHA,
+            "agent_dblclick_action": "editagent",
+            "default_agent_tbl_tab": "mixed",
+            "client_tree_sort": "alpha",
             "client_tree_splitter": 14,
             "loading_bar_color": "green",
             "clear_search_when_switching": False,
@@ -298,27 +296,6 @@ class TestUserAction(TacticalTestCase):
         self.check_not_authenticated("patch", url)
 
 
-class TestUserReset(TacticalTestCase):
-    def setUp(self):
-        self.authenticate()
-        self.setup_coresettings()
-
-    def test_reset_pw(self):
-        url = "/accounts/resetpw/"
-        data = {"password": "superSekret123456"}
-        r = self.client.put(url, data, format="json")
-        self.assertEqual(r.status_code, 200)
-
-        self.check_not_authenticated("put", url)
-
-    def test_reset_2fa(self):
-        url = "/accounts/reset2fa/"
-        r = self.client.put(url)
-        self.assertEqual(r.status_code, 200)
-
-        self.check_not_authenticated("put", url)
-
-
 class TestAPIKeyViews(TacticalTestCase):
     def setUp(self):
         self.setup_coresettings()
@@ -361,7 +338,7 @@ class TestAPIKeyViews(TacticalTestCase):
         resp = self.client.put(url, data, format="json")
         self.assertEqual(resp.status_code, 200)
         apikey = APIKey.objects.get(pk=apikey.pk)
-        self.assertEqual(apikey.name, "New Name")
+        self.assertEquals(apikey.name, "New Name")
 
         self.check_not_authenticated("put", url)
 
@@ -405,7 +382,7 @@ class TestTOTPSetup(TacticalTestCase):
 
         r = self.client.post(url)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data, False)
+        self.assertEqual(r.data, "totp token already set")
 
 
 class TestAPIAuthentication(TacticalTestCase):
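The login tests above stub out TOTP validation with unittest.mock.patch so any code "verifies"; the same trick in isolation looks like this (standalone example, not project code):

from unittest.mock import patch
import pyotp

with patch("pyotp.TOTP.verify", return_value=True):
    # the patched verify ignores its input, so a fixed "twofactor" value passes
    assert pyotp.TOTP(pyotp.random_base32()).verify("123456")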
@@ -5,10 +5,6 @@ from . import views
 urlpatterns = [
     path("users/", views.GetAddUsers.as_view()),
     path("<int:pk>/users/", views.GetUpdateDeleteUser.as_view()),
-    path("sessions/<str:pk>/", views.DeleteActiveLoginSession.as_view()),
-    path(
-        "users/<int:pk>/sessions/", views.GetDeleteActiveLoginSessionsPerUser.as_view()
-    ),
     path("users/reset/", views.UserActions.as_view()),
     path("users/reset_totp/", views.UserActions.as_view()),
     path("users/setup_totp/", views.TOTPSetup.as_view()),
@@ -17,6 +13,4 @@ urlpatterns = [
     path("roles/<int:pk>/", views.GetUpdateDeleteRole.as_view()),
     path("apikeys/", views.GetAddAPIKeys.as_view()),
     path("apikeys/<int:pk>/", views.GetUpdateDeleteAPIKey.as_view()),
-    path("resetpw/", views.ResetPass.as_view()),
-    path("reset2fa/", views.Reset2FA.as_view()),
 ]
@@ -1,24 +0,0 @@
from typing import TYPE_CHECKING

from django.conf import settings

if TYPE_CHECKING:
    from django.http import HttpRequest

    from accounts.models import User


def is_root_user(*, request: "HttpRequest", user: "User") -> bool:
    root = (
        hasattr(settings, "ROOT_USER")
        and request.user != user
        and user.username == settings.ROOT_USER
    )
    demo = (
        getattr(settings, "DEMO", False) and request.user.username == settings.ROOT_USER
    )
    return root or demo


def is_superuser(user: "User") -> bool:
    return user.role and getattr(user.role, "is_superuser")
@@ -1,39 +1,20 @@
-import datetime
-
 import pyotp
-from allauth.socialaccount.models import SocialAccount, SocialApp
 from django.conf import settings
 from django.contrib.auth import login
 from django.db import IntegrityError
 from django.shortcuts import get_object_or_404
-from django.utils import timezone as djangotime
-from knox.models import AuthToken
+from ipware import get_client_ip
 from knox.views import LoginView as KnoxLoginView
-from python_ipware import IpWare
+from logs.models import AuditLog
 from rest_framework.authtoken.serializers import AuthTokenSerializer
 from rest_framework.permissions import AllowAny, IsAuthenticated
 from rest_framework.response import Response
-from rest_framework.serializers import (
-    ModelSerializer,
-    ReadOnlyField,
-    SerializerMethodField,
-)
 from rest_framework.views import APIView
 
-from accounts.utils import is_root_user
-from core.tasks import sync_mesh_perms_task
-from logs.models import AuditLog
-from tacticalrmm.helpers import notify_error
-from tacticalrmm.utils import get_core_settings
-
+from tacticalrmm.utils import notify_error
 from .models import APIKey, Role, User
-from .permissions import (
-    AccountsPerms,
-    APIKeyPerms,
-    LocalUserPerms,
-    RolesPerms,
-    SelfResetSSOPerms,
-)
+from .permissions import AccountsPerms, APIKeyPerms, RolesPerms
 from .serializers import (
     APIKeySerializer,
     RoleSerializer,
@@ -43,14 +24,24 @@ from .serializers import (
 )
 
 
-class CheckCredsV2(KnoxLoginView):
+def _is_root_user(request, user) -> bool:
+    root = (
+        hasattr(settings, "ROOT_USER")
+        and request.user != user
+        and user.username == settings.ROOT_USER
+    )
+    demo = (
+        getattr(settings, "DEMO", False) and request.user.username == settings.ROOT_USER
+    )
+    return root or demo
+
+
+class CheckCreds(KnoxLoginView):
 
     permission_classes = (AllowAny,)
 
-    # restrict time on tokens issued by this view to 3 min
-    def get_token_ttl(self):
-        return datetime.timedelta(seconds=180)
-
     def post(self, request, format=None):
         # check credentials
         serializer = AuthTokenSerializer(data=request.data)
         if not serializer.is_valid():
@@ -61,25 +52,21 @@ class CheckCredsV2(KnoxLoginView):
 
         user = serializer.validated_data["user"]
 
-        if user.block_dashboard_login or user.is_sso_user:
-            return notify_error("Bad credentials")
-
-        # block local logon if configured
-        core_settings = get_core_settings()
-        if not user.is_superuser and core_settings.block_local_user_logon:
+        if user.block_dashboard_login:
             return notify_error("Bad credentials")
 
         # if totp token not set modify response to notify frontend
         if not user.totp_key:
             login(request, user)
-            response = super().post(request, format=None)
-            response.data["totp"] = False
+            response = super(CheckCreds, self).post(request, format=None)
+            response.data["totp"] = "totp not set"
             return response
 
-        return Response({"totp": True})
+        return Response("ok")
 
 
-class LoginViewV2(KnoxLoginView):
+class LoginView(KnoxLoginView):
 
     permission_classes = (AllowAny,)
 
     def post(self, request, format=None):
@@ -92,14 +79,6 @@ class LoginViewV2(KnoxLoginView):
         if user.block_dashboard_login:
             return notify_error("Bad credentials")
 
-        # block local logon if configured
-        core_settings = get_core_settings()
-        if not user.is_superuser and core_settings.block_local_user_logon:
-            return notify_error("Bad credentials")
-
-        if user.is_sso_user:
-            return notify_error("Bad credentials")
-
         token = request.data["twofactor"]
         totp = pyotp.TOTP(user.totp_key)
 
@@ -114,20 +93,14 @@ class LoginViewV2(KnoxLoginView):
             login(request, user)
 
             # save ip information
-            ipw = IpWare()
-            client_ip, _ = ipw.get_client_ip(request.META)
-            if client_ip:
-                user.last_login_ip = str(client_ip)
+            client_ip, is_routable = get_client_ip(request)
+            user.last_login_ip = client_ip
             user.save()
 
             AuditLog.audit_user_login_successful(
                 request.data["username"], debug_info={"ip": request._client_ip}
             )
-            response = super().post(request, format=None)
-            response.data["username"] = request.user.username
-            response.data["name"] = None
-
-            return Response(response.data)
+            return super(LoginView, self).post(request, format=None)
         else:
             AuditLog.audit_user_failed_twofactor(
                 request.data["username"], debug_info={"ip": request._client_ip}
@@ -135,100 +108,9 @@ class LoginViewV2(KnoxLoginView):
             return notify_error("Bad credentials")
 
 
-class GetDeleteActiveLoginSessionsPerUser(APIView):
-    permission_classes = [IsAuthenticated, AccountsPerms]
-
-    class TokenSerializer(ModelSerializer):
-        user = ReadOnlyField(source="user.username")
-
-        class Meta:
-            model = AuthToken
-            fields = (
-                "digest",
-                "user",
-                "created",
-                "expiry",
-            )
-
-    def get(self, request, pk):
-        tokens = get_object_or_404(User, pk=pk).auth_token_set.filter(
-            expiry__gt=djangotime.now()
-        )
-
-        return Response(self.TokenSerializer(tokens, many=True).data)
-
-    def delete(self, request, pk):
-        tokens = get_object_or_404(User, pk=pk).auth_token_set.filter(
-            expiry__gt=djangotime.now()
-        )
-
-        tokens.delete()
-        return Response("ok")
-
-
-class DeleteActiveLoginSession(APIView):
-    permission_classes = [IsAuthenticated, AccountsPerms]
-
-    def delete(self, request, pk):
-        token = get_object_or_404(AuthToken, digest=pk)
-
-        token.delete()
-
-        return Response("ok")
-
-
 class GetAddUsers(APIView):
     permission_classes = [IsAuthenticated, AccountsPerms]
 
-    class UserSerializerSSO(ModelSerializer):
-        social_accounts = SerializerMethodField()
-
-        def get_social_accounts(self, obj):
-            accounts = SocialAccount.objects.filter(user_id=obj.pk)
-
-            if accounts:
-                social_accounts = []
-                for account in accounts:
-                    try:
-                        provider_account = account.get_provider_account()
-                        display = provider_account.to_str()
-                    except SocialApp.DoesNotExist:
-                        display = "Orphaned Provider"
-                    except Exception:
-                        display = "Unknown"
-
-                    social_accounts.append(
-                        {
-                            "uid": account.uid,
-                            "provider": account.provider,
-                            "display": display,
-                            "last_login": account.last_login,
-                            "date_joined": account.date_joined,
-                            "extra_data": account.extra_data,
-                        }
-                    )
-
-                return social_accounts
-
-            return []
-
-        class Meta:
-            model = User
-            fields = [
-                "id",
-                "username",
-                "first_name",
-                "last_name",
-                "email",
-                "is_active",
-                "last_login",
-                "last_login_ip",
-                "role",
-                "block_dashboard_login",
-                "date_format",
-                "social_accounts",
-            ]
-
     def get(self, request):
         search = request.GET.get("search", None)
 
@@ -239,7 +121,7 @@ class GetAddUsers(APIView):
         else:
             users = User.objects.filter(agent=None, is_installer_user=False)
 
-        return Response(self.UserSerializerSSO(users, many=True).data)
+        return Response(UserSerializer(users, many=True).data)
 
     def post(self, request):
         # add new user
@@ -263,7 +145,6 @@ class GetAddUsers(APIView):
             user.role = role
 
         user.save()
-        sync_mesh_perms_task.delay()
         return Response(user.username)
 
 
@@ -278,33 +159,31 @@ class GetUpdateDeleteUser(APIView):
     def put(self, request, pk):
         user = get_object_or_404(User, pk=pk)
 
-        if is_root_user(request=request, user=user):
+        if _is_root_user(request, user):
            return notify_error("The root user cannot be modified from the UI")
 
         serializer = UserSerializer(instance=user, data=request.data, partial=True)
         serializer.is_valid(raise_exception=True)
         serializer.save()
-        sync_mesh_perms_task.delay()
 
         return Response("ok")
 
     def delete(self, request, pk):
         user = get_object_or_404(User, pk=pk)
-        if is_root_user(request=request, user=user):
+        if _is_root_user(request, user):
             return notify_error("The root user cannot be deleted from the UI")
 
         user.delete()
-        sync_mesh_perms_task.delay()
         return Response("ok")
 
 
 class UserActions(APIView):
-    permission_classes = [IsAuthenticated, AccountsPerms, LocalUserPerms]
+    permission_classes = [IsAuthenticated, AccountsPerms]
 
     # reset password
     def post(self, request):
         user = get_object_or_404(User, pk=request.data["id"])
-        if is_root_user(request=request, user=user):
+        if _is_root_user(request, user):
             return notify_error("The root user cannot be modified from the UI")
 
         user.set_password(request.data["password"])
@@ -315,7 +194,7 @@ class UserActions(APIView):
     # reset two factor token
     def put(self, request):
         user = get_object_or_404(User, pk=request.data["id"])
-        if is_root_user(request=request, user=user):
+        if _is_root_user(request, user):
             return notify_error("The root user cannot be modified from the UI")
 
         user.totp_key = ""
@@ -327,8 +206,10 @@ class UserActions(APIView):
 
 
 class TOTPSetup(APIView):
+
     # totp setup
     def post(self, request):
+
         user = request.user
         if not user.totp_key:
             code = pyotp.random_base32()
@@ -336,7 +217,7 @@ class TOTPSetup(APIView):
             user.save(update_fields=["totp_key"])
             return Response(TOTPSetupSerializer(user).data)
 
-        return Response(False)
+        return Response("totp token already set")
 
 
 class UserUI(APIView):
@@ -375,13 +256,11 @@ class GetUpdateDeleteRole(APIView):
         serializer = RoleSerializer(instance=role, data=request.data)
         serializer.is_valid(raise_exception=True)
         serializer.save()
-        sync_mesh_perms_task.delay()
         return Response("Role was edited")
 
     def delete(self, request, pk):
         role = get_object_or_404(Role, pk=pk)
         role.delete()
-        sync_mesh_perms_task.delay()
         return Response("Role was removed")
 
 
@@ -399,7 +278,7 @@ class GetAddAPIKeys(APIView):
         request.data["key"] = get_random_string(length=32).upper()
         serializer = APIKeySerializer(data=request.data)
         serializer.is_valid(raise_exception=True)
-        serializer.save()
+        obj = serializer.save()
         return Response("The API Key was added")
 
 
@@ -422,23 +301,3 @@ class GetUpdateDeleteAPIKey(APIView):
         apikey = get_object_or_404(APIKey, pk=pk)
         apikey.delete()
         return Response("The API Key was deleted")
-
-
-class ResetPass(APIView):
-    permission_classes = [IsAuthenticated, SelfResetSSOPerms]
-
-    def put(self, request):
-        user = request.user
-        user.set_password(request.data["password"])
-        user.save()
-        return Response("Password was reset.")
-
-
-class Reset2FA(APIView):
-    permission_classes = [IsAuthenticated, SelfResetSSOPerms]
-
-    def put(self, request):
-        user = request.user
-        user.totp_key = ""
-        user.save()
return Response("2FA was reset. Log out and back in to setup.")
|
|
||||||
|
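The TOTPSetup hunk above only swaps the response returned when a key already exists; the provisioning flow itself is identical on both sides. As a rough, self-contained sketch of that flow using pyotp (illustrative only, not the project's serializer or endpoint code; the account name below is a placeholder, not taken from the diff):

import pyotp

# New secret for a user who has no TOTP key yet, as the view does.
secret = pyotp.random_base32()

# The UI would normally render this URI as a QR code for the authenticator app.
uri = pyotp.totp.TOTP(secret).provisioning_uri(name="admin@example.com", issuer_name="Tactical RMM")

# A submitted 6-digit code is later verified against the stored secret.
totp = pyotp.TOTP(secret)
print(uri)
print(totp.verify(totp.now()))  # True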
@@ -1,6 +1,6 @@
 import json
 import os
-import secrets
+import random
 import string
 from itertools import cycle

@@ -8,11 +8,10 @@ from django.conf import settings
 from django.utils import timezone as djangotime
 from model_bakery.recipe import Recipe, foreign_key, seq

-from tacticalrmm.constants import AgentMonType, AgentPlat

-def generate_agent_id() -> str:
-return "".join(secrets.choice(string.ascii_letters) for i in range(39))
+def generate_agent_id(hostname):
+rand = "".join(random.choice(string.ascii_letters) for _ in range(35))
+return f"{rand}-{hostname}"


 site = Recipe("clients.Site")
@@ -25,34 +24,26 @@ def get_wmi_data():
 return json.load(f)


-def get_win_svcs():
-svcs = settings.BASE_DIR.joinpath("tacticalrmm/test_data/winsvcs.json")
-with open(svcs) as f:
-return json.load(f)


 agent = Recipe(
 "agents.Agent",
 site=foreign_key(site),
 hostname="DESKTOP-TEST123",
 version="1.3.0",
-monitoring_type=cycle(AgentMonType.values),
+monitoring_type=cycle(["workstation", "server"]),
-agent_id=seq(generate_agent_id()),
+agent_id=seq(generate_agent_id("DESKTOP-TEST123")),
 last_seen=djangotime.now() - djangotime.timedelta(days=5),
-plat=AgentPlat.WINDOWS,
+plat="windows",
 )

 server_agent = agent.extend(
-monitoring_type=AgentMonType.SERVER,
+monitoring_type="server",
 )

 workstation_agent = agent.extend(
-monitoring_type=AgentMonType.WORKSTATION,
+monitoring_type="workstation",
 )

-online_agent = agent.extend(
-last_seen=djangotime.now(), services=get_win_svcs(), wmi_detail=get_wmi_data()
-)
+online_agent = agent.extend(last_seen=djangotime.now())

 offline_agent = agent.extend(
 last_seen=djangotime.now() - djangotime.timedelta(minutes=7)
@@ -87,4 +78,4 @@ agent_with_services = agent.extend(
 ],
 )

-agent_with_wmi = agent.extend(wmi_detail=get_wmi_data())
+agent_with_wmi = agent.extend(wmi=get_wmi_data())
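Both versions of generate_agent_id in the recipe hunk above build a random ASCII identifier; one side draws 39 letters with secrets, the other draws 35 letters with random and appends the hostname. A minimal standalone sketch of the two approaches (an illustration, not code copied from either branch):

import random
import secrets
import string

def agent_id_plain(length: int = 39) -> str:
    # secrets.choice gives cryptographically strong randomness.
    return "".join(secrets.choice(string.ascii_letters) for _ in range(length))

def agent_id_with_hostname(hostname: str, length: int = 35) -> str:
    # Pseudo-randomness is enough for test fixtures; the hostname keeps IDs readable.
    rand = "".join(random.choice(string.ascii_letters) for _ in range(length))
    return f"{rand}-{hostname}"

print(agent_id_plain())
print(agent_id_with_hostname("DESKTOP-TEST123"))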
@@ -1,82 +0,0 @@
-from agents.models import Agent, AgentHistory
-from channels.db import database_sync_to_async
-from channels.generic.websocket import AsyncJsonWebsocketConsumer
-from django.contrib.auth.models import AnonymousUser
-from django.shortcuts import get_object_or_404
-from tacticalrmm.constants import AGENT_DEFER, AgentHistoryType
-from tacticalrmm.permissions import _has_perm_on_agent
-
-
-class SendCMD(AsyncJsonWebsocketConsumer):
-async def connect(self):
-self.user = self.scope["user"]
-
-if isinstance(self.user, AnonymousUser):
-await self.close()
-
-await self.accept()
-
-async def receive_json(self, payload, **kwargs):
-auth = await self.has_perm(payload["agent_id"])
-if not auth:
-await self.send_json(
-{"ret": "You do not have permission to perform this action."}
-)
-return
-
-agent = await self.get_agent(payload["agent_id"])
-timeout = int(payload["timeout"])
-if payload["shell"] == "custom" and payload["custom_shell"]:
-shell = payload["custom_shell"]
-else:
-shell = payload["shell"]
-
-hist_pk = await self.get_history_id(agent, payload["cmd"])
-
-data = {
-"func": "rawcmd",
-"timeout": timeout,
-"payload": {
-"command": payload["cmd"],
-"shell": shell,
-},
-"id": hist_pk,
-}
-
-ret = await agent.nats_cmd(data, timeout=timeout + 2)
-await self.send_json({"ret": ret})
-
-async def disconnect(self, _):
-pass
-
-def _has_perm(self, perm: str) -> bool:
-if self.user.is_superuser or (
-self.user.role and getattr(self.user.role, "is_superuser")
-):
-return True
-
-# make sure non-superusers with empty roles aren't permitted
-elif not self.user.role:
-return False
-
-return self.user.role and getattr(self.user.role, perm)
-
-@database_sync_to_async  # type: ignore
-def get_agent(self, agent_id: str) -> "Agent":
-return get_object_or_404(Agent.objects.defer(*AGENT_DEFER), agent_id=agent_id)
-
-@database_sync_to_async  # type: ignore
-def get_history_id(self, agent: "Agent", cmd: str) -> int:
-hist = AgentHistory.objects.create(
-agent=agent,
-type=AgentHistoryType.CMD_RUN,
-command=cmd,
-username=self.user.username[:50],
-)
-return hist.pk
-
-@database_sync_to_async  # type: ignore
-def has_perm(self, agent_id: str) -> bool:
-return self._has_perm("can_send_cmd") and _has_perm_on_agent(
-self.user, agent_id
-)
@@ -1,7 +1,6 @@
-from django.core.management.base import BaseCommand
-
 from agents.models import Agent
 from clients.models import Client, Site
+from django.core.management.base import BaseCommand


 class Command(BaseCommand):
@@ -1,16 +1,16 @@
 import asyncio

+from agents.models import Agent
 from django.core.management.base import BaseCommand
 from django.utils import timezone as djangotime
 from packaging import version as pyver

-from agents.models import Agent
 from tacticalrmm.constants import AGENT_DEFER
 from tacticalrmm.utils import reload_nats


 class Command(BaseCommand):
-help = "Delete multiple agents based on criteria"
+help = "Delete old agents"

 def add_arguments(self, parser):
 parser.add_argument(
@@ -23,21 +23,6 @@ class Command(BaseCommand):
 type=str,
 help="Delete agents that equal to or less than this version",
 )
-parser.add_argument(
-"--site",
-type=str,
-help="Delete agents that belong to the specified site",
-)
-parser.add_argument(
-"--client",
-type=str,
-help="Delete agents that belong to the specified client",
-)
-parser.add_argument(
-"--hostname",
-type=str,
-help="Delete agents with hostname starting with argument",
-)
 parser.add_argument(
 "--delete",
 action="store_true",
@@ -47,40 +32,25 @@ class Command(BaseCommand):
 def handle(self, *args, **kwargs):
 days = kwargs["days"]
 agentver = kwargs["agentver"]
-site = kwargs["site"]
-client = kwargs["client"]
-hostname = kwargs["hostname"]
 delete = kwargs["delete"]

-if not days and not agentver and not site and not client and not hostname:
+if not days and not agentver:
 self.stdout.write(
-self.style.ERROR(
-"Must have at least one parameter: days, agentver, site, client or hostname"
-)
+self.style.ERROR("Must have at least one parameter: days or agentver")
 )
 return

-agents = Agent.objects.select_related("site__client").defer(*AGENT_DEFER)
+q = Agent.objects.defer(*AGENT_DEFER)

+agents = []
 if days:
 overdue = djangotime.now() - djangotime.timedelta(days=days)
-agents = agents.filter(last_seen__lt=overdue)
+agents = [i for i in q if i.last_seen < overdue]

-if site:
-agents = agents.filter(site__name=site)
-
-if client:
-agents = agents.filter(site__client__name=client)
-
-if hostname:
-agents = agents.filter(hostname__istartswith=hostname)
-
 if agentver:
-agents = [
-i for i in agents if pyver.parse(i.version) <= pyver.parse(agentver)
-]
+agents = [i for i in q if pyver.parse(i.version) <= pyver.parse(agentver)]

-if len(agents) == 0:
+if not agents:
 self.stdout.write(self.style.ERROR("No agents matched"))
 return

@@ -94,7 +64,7 @@ class Command(BaseCommand):
 try:
 agent.delete()
 except Exception as e:
-err = f"Failed to delete agent {agent.hostname}: {e}"
+err = f"Failed to delete agent {agent.hostname}: {str(e)}"
 self.stdout.write(self.style.ERROR(err))
 else:
 deleted_count += 1
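Both sides of the handle() hunk above keep agents whose reported version is at or below the --agentver cutoff; they differ only in which extra filters exist and in whether the result stays a queryset or becomes a list. A small self-contained sketch of that version comparison using packaging (with made-up sample data instead of the Agent model):

from packaging import version as pyver

agents = [
    {"hostname": "DC-1", "version": "2.0.0"},
    {"hostname": "WSUS", "version": "2.4.1"},
]
cutoff = "2.3.0"

# Keep agents running the cutoff version or older, as the command does.
stale = [a for a in agents if pyver.parse(a["version"]) <= pyver.parse(cutoff)]
print([a["hostname"] for a in stale])  # ['DC-1']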
@@ -1,17 +1,17 @@
 # import datetime as dt
 import random

+from agents.models import Agent
+from core.tasks import cache_db_fields_task, handle_resolved_stuff
 from django.core.management.base import BaseCommand
 from django.utils import timezone as djangotime

-from agents.models import Agent
-from core.tasks import cache_db_fields_task


 class Command(BaseCommand):
 help = "stuff for demo site in cron"

 def handle(self, *args, **kwargs):

 random_dates = []
 now = djangotime.now()

@@ -23,9 +23,18 @@ class Command(BaseCommand):
 rand = now - djangotime.timedelta(minutes=random.randint(10, 20))
 random_dates.append(rand)

+""" for _ in range(5):
+rand = djangotime.now() - djangotime.timedelta(hours=random.randint(1, 10))
+random_dates.append(rand)

+for _ in range(5):
+rand = djangotime.now() - djangotime.timedelta(days=random.randint(40, 90))
+random_dates.append(rand) """

 agents = Agent.objects.only("last_seen")
 for agent in agents:
 agent.last_seen = random.choice(random_dates)
 agent.save(update_fields=["last_seen"])

 cache_db_fields_task()
+handle_resolved_stuff()
@@ -3,55 +3,28 @@ import json
|
|||||||
import random
|
import random
|
||||||
import string
|
import string
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
from django.core.management import call_command
|
|
||||||
from django.core.management.base import BaseCommand
|
|
||||||
from django.utils import timezone as djangotime
|
|
||||||
|
|
||||||
from accounts.models import User
|
from accounts.models import User
|
||||||
from agents.models import Agent, AgentHistory
|
from agents.models import Agent, AgentHistory
|
||||||
from automation.models import Policy
|
from automation.models import Policy
|
||||||
from autotasks.models import AutomatedTask, TaskResult
|
from autotasks.models import AutomatedTask, TaskResult
|
||||||
from checks.models import Check, CheckHistory, CheckResult
|
from checks.models import Check, CheckResult, CheckHistory
|
||||||
from clients.models import Client, Site
|
from clients.models import Client, Site
|
||||||
|
from django.conf import settings
|
||||||
|
from django.core.management import call_command
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.utils import timezone as djangotime
|
||||||
from logs.models import AuditLog, PendingAction
|
from logs.models import AuditLog, PendingAction
|
||||||
from scripts.models import Script
|
from scripts.models import Script
|
||||||
from software.models import InstalledSoftware
|
from software.models import InstalledSoftware
|
||||||
from tacticalrmm.constants import (
|
from winupdate.models import WinUpdate, WinUpdatePolicy
|
||||||
AgentHistoryType,
|
|
||||||
AgentMonType,
|
|
||||||
AgentPlat,
|
|
||||||
AlertSeverity,
|
|
||||||
CheckStatus,
|
|
||||||
CheckType,
|
|
||||||
EvtLogFailWhen,
|
|
||||||
EvtLogNames,
|
|
||||||
EvtLogTypes,
|
|
||||||
GoArch,
|
|
||||||
PAAction,
|
|
||||||
ScriptShell,
|
|
||||||
TaskSyncStatus,
|
|
||||||
TaskType,
|
|
||||||
)
|
|
||||||
from tacticalrmm.demo_data import (
|
from tacticalrmm.demo_data import (
|
||||||
check_network_loc_aware_ps1,
|
|
||||||
check_storage_pool_health_ps1,
|
|
||||||
clear_print_spool_bat,
|
|
||||||
disks,
|
disks,
|
||||||
disks_linux_deb,
|
|
||||||
disks_linux_pi,
|
|
||||||
ping_fail_output,
|
ping_fail_output,
|
||||||
ping_success_output,
|
ping_success_output,
|
||||||
restart_nla_ps1,
|
|
||||||
show_temp_dir_py,
|
|
||||||
spooler_stdout,
|
spooler_stdout,
|
||||||
temp_dir_stdout,
|
temp_dir_stdout,
|
||||||
wmi_deb,
|
|
||||||
wmi_pi,
|
|
||||||
wmi_mac,
|
|
||||||
disks_mac,
|
|
||||||
)
|
)
|
||||||
from winupdate.models import WinUpdate, WinUpdatePolicy
|
|
||||||
|
|
||||||
AGENTS_TO_GENERATE = 250
|
AGENTS_TO_GENERATE = 250
|
||||||
|
|
||||||
@@ -70,18 +43,19 @@ EVT_LOG_FAIL = settings.BASE_DIR.joinpath(
|
|||||||
class Command(BaseCommand):
|
class Command(BaseCommand):
|
||||||
help = "populate database with fake agents"
|
help = "populate database with fake agents"
|
||||||
|
|
||||||
def rand_string(self, length: int) -> str:
|
def rand_string(self, length):
|
||||||
chars = string.ascii_letters
|
chars = string.ascii_letters
|
||||||
return "".join(random.choice(chars) for _ in range(length))
|
return "".join(random.choice(chars) for _ in range(length))
|
||||||
|
|
||||||
def handle(self, *args, **kwargs) -> None:
|
def handle(self, *args, **kwargs):
|
||||||
|
|
||||||
user = User.objects.first()
|
user = User.objects.first()
|
||||||
if user:
|
if user:
|
||||||
user.totp_key = "ABSA234234"
|
user.totp_key = "ABSA234234"
|
||||||
user.save(update_fields=["totp_key"])
|
user.save(update_fields=["totp_key"])
|
||||||
|
|
||||||
Agent.objects.all().delete()
|
|
||||||
Client.objects.all().delete()
|
Client.objects.all().delete()
|
||||||
|
Agent.objects.all().delete()
|
||||||
Check.objects.all().delete()
|
Check.objects.all().delete()
|
||||||
Script.objects.all().delete()
|
Script.objects.all().delete()
|
||||||
AutomatedTask.objects.all().delete()
|
AutomatedTask.objects.all().delete()
|
||||||
@@ -91,9 +65,6 @@ class Command(BaseCommand):
|
|||||||
PendingAction.objects.all().delete()
|
PendingAction.objects.all().delete()
|
||||||
|
|
||||||
call_command("load_community_scripts")
|
call_command("load_community_scripts")
|
||||||
call_command("initial_db_setup")
|
|
||||||
call_command("load_chocos")
|
|
||||||
call_command("create_installer_user")
|
|
||||||
|
|
||||||
# policies
|
# policies
|
||||||
check_policy = Policy()
|
check_policy = Policy()
|
||||||
@@ -124,27 +95,27 @@ class Command(BaseCommand):
|
|||||||
update_policy.email_if_fail = True
|
update_policy.email_if_fail = True
|
||||||
update_policy.save()
|
update_policy.save()
|
||||||
|
|
||||||
clients = (
|
clients = [
|
||||||
"Company 1",
|
|
||||||
"Company 2",
|
"Company 2",
|
||||||
"Company 3",
|
"Company 3",
|
||||||
|
"Company 1",
|
||||||
"Company 4",
|
"Company 4",
|
||||||
"Company 5",
|
"Company 5",
|
||||||
"Company 6",
|
"Company 6",
|
||||||
)
|
]
|
||||||
sites1 = ("HQ1", "LA Office 1", "NY Office 1")
|
sites1 = ["HQ1", "LA Office 1", "NY Office 1"]
|
||||||
sites2 = ("HQ2", "LA Office 2", "NY Office 2")
|
sites2 = ["HQ2", "LA Office 2", "NY Office 2"]
|
||||||
sites3 = ("HQ3", "LA Office 3", "NY Office 3")
|
sites3 = ["HQ3", "LA Office 3", "NY Office 3"]
|
||||||
sites4 = ("HQ4", "LA Office 4", "NY Office 4")
|
sites4 = ["HQ4", "LA Office 4", "NY Office 4"]
|
||||||
sites5 = ("HQ5", "LA Office 5", "NY Office 5")
|
sites5 = ["HQ5", "LA Office 5", "NY Office 5"]
|
||||||
sites6 = ("HQ6", "LA Office 6", "NY Office 6")
|
sites6 = ["HQ6", "LA Office 6", "NY Office 6"]
|
||||||
|
|
||||||
client1 = Client(name=clients[0])
|
client1 = Client(name="Company 1")
|
||||||
client2 = Client(name=clients[1])
|
client2 = Client(name="Company 2")
|
||||||
client3 = Client(name=clients[2])
|
client3 = Client(name="Company 3")
|
||||||
client4 = Client(name=clients[3])
|
client4 = Client(name="Company 4")
|
||||||
client5 = Client(name=clients[4])
|
client5 = Client(name="Company 5")
|
||||||
client6 = Client(name=clients[5])
|
client6 = Client(name="Company 6")
|
||||||
|
|
||||||
client1.save()
|
client1.save()
|
||||||
client2.save()
|
client2.save()
|
||||||
@@ -171,7 +142,7 @@ class Command(BaseCommand):
|
|||||||
for site in sites6:
|
for site in sites6:
|
||||||
Site(client=client6, name=site).save()
|
Site(client=client6, name=site).save()
|
||||||
|
|
||||||
hostnames = (
|
hostnames = [
|
||||||
"DC-1",
|
"DC-1",
|
||||||
"DC-2",
|
"DC-2",
|
||||||
"FSV-1",
|
"FSV-1",
|
||||||
@@ -179,33 +150,26 @@ class Command(BaseCommand):
|
|||||||
"WSUS",
|
"WSUS",
|
||||||
"DESKTOP-12345",
|
"DESKTOP-12345",
|
||||||
"LAPTOP-55443",
|
"LAPTOP-55443",
|
||||||
"db-aws-01",
|
]
|
||||||
"Karens-MacBook-Air.local",
|
descriptions = ["Bob's computer", "Primary DC", "File Server", "Karen's Laptop"]
|
||||||
)
|
modes = ["server", "workstation"]
|
||||||
descriptions = ("Bob's computer", "Primary DC", "File Server", "Karen's Laptop")
|
op_systems_servers = [
|
||||||
modes = AgentMonType.values
|
|
||||||
op_systems_servers = (
|
|
||||||
"Microsoft Windows Server 2016 Standard, 64bit (build 14393)",
|
"Microsoft Windows Server 2016 Standard, 64bit (build 14393)",
|
||||||
"Microsoft Windows Server 2012 R2 Standard, 64bit (build 9600)",
|
"Microsoft Windows Server 2012 R2 Standard, 64bit (build 9600)",
|
||||||
"Microsoft Windows Server 2019 Standard, 64bit (build 17763)",
|
"Microsoft Windows Server 2019 Standard, 64bit (build 17763)",
|
||||||
)
|
]
|
||||||
|
|
||||||
op_systems_workstations = (
|
op_systems_workstations = [
|
||||||
"Microsoft Windows 8.1 Pro, 64bit (build 9600)",
|
"Microsoft Windows 8.1 Pro, 64bit (build 9600)",
|
||||||
"Microsoft Windows 10 Pro for Workstations, 64bit (build 18363)",
|
"Microsoft Windows 10 Pro for Workstations, 64bit (build 18363)",
|
||||||
"Microsoft Windows 10 Pro, 64bit (build 18363)",
|
"Microsoft Windows 10 Pro, 64bit (build 18363)",
|
||||||
)
|
]
|
||||||
|
|
||||||
linux_deb_os = "Debian 11.2 x86_64 5.10.0-11-amd64"
|
public_ips = ["65.234.22.4", "74.123.43.5", "44.21.134.45"]
|
||||||
linux_pi_os = "Raspbian 11.2 armv7l 5.10.92-v7+"
|
|
||||||
mac_os = "Darwin 12.5.1 arm64 21.6.0"
|
|
||||||
|
|
||||||
public_ips = ("65.234.22.4", "74.123.43.5", "44.21.134.45")
|
total_rams = [4, 8, 16, 32, 64, 128]
|
||||||
|
|
||||||
total_rams = (4, 8, 16, 32, 64, 128)
|
|
||||||
|
|
||||||
now = dt.datetime.now()
|
now = dt.datetime.now()
|
||||||
django_now = djangotime.now()
|
|
||||||
|
|
||||||
boot_times = []
|
boot_times = []
|
||||||
|
|
||||||
@@ -217,7 +181,7 @@ class Command(BaseCommand):
|
|||||||
rand_days = now - dt.timedelta(days=random.randint(2, 50))
|
rand_days = now - dt.timedelta(days=random.randint(2, 50))
|
||||||
boot_times.append(str(rand_days.timestamp()))
|
boot_times.append(str(rand_days.timestamp()))
|
||||||
|
|
||||||
user_names = ("None", "Karen", "Steve", "jsmith", "jdoe")
|
user_names = ["None", "Karen", "Steve", "jsmith", "jdoe"]
|
||||||
|
|
||||||
with open(SVCS) as f:
|
with open(SVCS) as f:
|
||||||
services = json.load(f)
|
services = json.load(f)
|
||||||
@@ -232,7 +196,10 @@ class Command(BaseCommand):
|
|||||||
with open(WMI_3) as f:
|
with open(WMI_3) as f:
|
||||||
wmi3 = json.load(f)
|
wmi3 = json.load(f)
|
||||||
|
|
||||||
wmi_details = [i for i in (wmi1, wmi2, wmi3)]
|
wmi_details = []
|
||||||
|
wmi_details.append(wmi1)
|
||||||
|
wmi_details.append(wmi2)
|
||||||
|
wmi_details.append(wmi3)
|
||||||
|
|
||||||
# software
|
# software
|
||||||
with open(SW_1) as f:
|
with open(SW_1) as f:
|
||||||
@@ -241,7 +208,9 @@ class Command(BaseCommand):
|
|||||||
with open(SW_2) as f:
|
with open(SW_2) as f:
|
||||||
software2 = json.load(f)
|
software2 = json.load(f)
|
||||||
|
|
||||||
softwares = [i for i in (software1, software2)]
|
softwares = []
|
||||||
|
softwares.append(software1)
|
||||||
|
softwares.append(software2)
|
||||||
|
|
||||||
# windows updates
|
# windows updates
|
||||||
with open(WIN_UPDATES) as f:
|
with open(WIN_UPDATES) as f:
|
||||||
@@ -257,103 +226,74 @@ class Command(BaseCommand):
|
|||||||
clear_spool.name = "Clear Print Spooler"
|
clear_spool.name = "Clear Print Spooler"
|
||||||
clear_spool.description = "clears the print spooler. Fuck printers"
|
clear_spool.description = "clears the print spooler. Fuck printers"
|
||||||
clear_spool.filename = "clear_print_spool.bat"
|
clear_spool.filename = "clear_print_spool.bat"
|
||||||
clear_spool.shell = ScriptShell.CMD
|
clear_spool.shell = "cmd"
|
||||||
clear_spool.script_body = clear_print_spool_bat
|
|
||||||
clear_spool.save()
|
clear_spool.save()
|
||||||
|
|
||||||
check_net_aware = Script()
|
check_net_aware = Script()
|
||||||
check_net_aware.name = "Check Network Location Awareness"
|
check_net_aware.name = "Check Network Location Awareness"
|
||||||
check_net_aware.description = "Check's network location awareness on domain computers, should always be domain profile and not public or private. Sometimes happens when computer restarts before domain available. This script will return 0 if check passes or 1 if it fails."
|
check_net_aware.description = "Check's network location awareness on domain computers, should always be domain profile and not public or private. Sometimes happens when computer restarts before domain available. This script will return 0 if check passes or 1 if it fails."
|
||||||
check_net_aware.filename = "check_network_loc_aware.ps1"
|
check_net_aware.filename = "check_network_loc_aware.ps1"
|
||||||
check_net_aware.shell = ScriptShell.POWERSHELL
|
check_net_aware.shell = "powershell"
|
||||||
check_net_aware.script_body = check_network_loc_aware_ps1
|
|
||||||
check_net_aware.save()
|
check_net_aware.save()
|
||||||
|
|
||||||
check_pool_health = Script()
|
check_pool_health = Script()
|
||||||
check_pool_health.name = "Check storage spool health"
|
check_pool_health.name = "Check storage spool health"
|
||||||
check_pool_health.description = "loops through all storage pools and will fail if any of them are not healthy"
|
check_pool_health.description = "loops through all storage pools and will fail if any of them are not healthy"
|
||||||
check_pool_health.filename = "check_storage_pool_health.ps1"
|
check_pool_health.filename = "check_storage_pool_health.ps1"
|
||||||
check_pool_health.shell = ScriptShell.POWERSHELL
|
check_pool_health.shell = "powershell"
|
||||||
check_pool_health.script_body = check_storage_pool_health_ps1
|
|
||||||
check_pool_health.save()
|
check_pool_health.save()
|
||||||
|
|
||||||
restart_nla = Script()
|
restart_nla = Script()
|
||||||
restart_nla.name = "Restart NLA Service"
|
restart_nla.name = "Restart NLA Service"
|
||||||
restart_nla.description = "restarts the Network Location Awareness windows service to fix the nic profile. Run this after the check network service fails"
|
restart_nla.description = "restarts the Network Location Awareness windows service to fix the nic profile. Run this after the check network service fails"
|
||||||
restart_nla.filename = "restart_nla.ps1"
|
restart_nla.filename = "restart_nla.ps1"
|
||||||
restart_nla.shell = ScriptShell.POWERSHELL
|
restart_nla.shell = "powershell"
|
||||||
restart_nla.script_body = restart_nla_ps1
|
|
||||||
restart_nla.save()
|
restart_nla.save()
|
||||||
|
|
||||||
show_tmp_dir_script = Script()
|
show_tmp_dir_script = Script()
|
||||||
show_tmp_dir_script.name = "Check temp dir"
|
show_tmp_dir_script.name = "Check temp dir"
|
||||||
show_tmp_dir_script.description = "shows files in temp dir using python"
|
show_tmp_dir_script.description = "shows files in temp dir using python"
|
||||||
show_tmp_dir_script.filename = "show_temp_dir.py"
|
show_tmp_dir_script.filename = "show_temp_dir.py"
|
||||||
show_tmp_dir_script.shell = ScriptShell.PYTHON
|
show_tmp_dir_script.shell = "python"
|
||||||
show_tmp_dir_script.script_body = show_temp_dir_py
|
|
||||||
show_tmp_dir_script.save()
|
show_tmp_dir_script.save()
|
||||||
|
|
||||||
for count_agents in range(AGENTS_TO_GENERATE):
|
for count_agents in range(AGENTS_TO_GENERATE):
|
||||||
|
|
||||||
client = random.choice(clients)
|
client = random.choice(clients)
|
||||||
|
|
||||||
if client == clients[0]:
|
if client == "Company 1":
|
||||||
site = random.choice(sites1)
|
site = random.choice(sites1)
|
||||||
elif client == clients[1]:
|
elif client == "Company 2":
|
||||||
site = random.choice(sites2)
|
site = random.choice(sites2)
|
||||||
elif client == clients[2]:
|
elif client == "Company 3":
|
||||||
site = random.choice(sites3)
|
site = random.choice(sites3)
|
||||||
elif client == clients[3]:
|
elif client == "Company 4":
|
||||||
site = random.choice(sites4)
|
site = random.choice(sites4)
|
||||||
elif client == clients[4]:
|
elif client == "Company 5":
|
||||||
site = random.choice(sites5)
|
site = random.choice(sites5)
|
||||||
elif client == clients[5]:
|
elif client == "Company 6":
|
||||||
site = random.choice(sites6)
|
site = random.choice(sites6)
|
||||||
|
else:
|
||||||
|
site = None
|
||||||
|
|
||||||
agent = Agent()
|
agent = Agent()
|
||||||
|
|
||||||
plat_pick = random.randint(1, 15)
|
|
||||||
if plat_pick in (7, 11):
|
|
||||||
agent.plat = AgentPlat.LINUX
|
|
||||||
mode = AgentMonType.SERVER
|
|
||||||
# pi arm
|
|
||||||
if plat_pick == 7:
|
|
||||||
agent.goarch = GoArch.ARM32
|
|
||||||
agent.wmi_detail = wmi_pi
|
|
||||||
agent.disks = disks_linux_pi
|
|
||||||
agent.operating_system = linux_pi_os
|
|
||||||
else:
|
|
||||||
agent.goarch = GoArch.AMD64
|
|
||||||
agent.wmi_detail = wmi_deb
|
|
||||||
agent.disks = disks_linux_deb
|
|
||||||
agent.operating_system = linux_deb_os
|
|
||||||
elif plat_pick in (4, 14):
|
|
||||||
agent.plat = AgentPlat.DARWIN
|
|
||||||
mode = random.choice([AgentMonType.SERVER, AgentMonType.WORKSTATION])
|
|
||||||
agent.goarch = GoArch.ARM64
|
|
||||||
agent.wmi_detail = wmi_mac
|
|
||||||
agent.disks = disks_mac
|
|
||||||
agent.operating_system = mac_os
|
|
||||||
else:
|
|
||||||
agent.plat = AgentPlat.WINDOWS
|
|
||||||
agent.goarch = GoArch.AMD64
|
|
||||||
mode = random.choice(modes)
|
mode = random.choice(modes)
|
||||||
agent.wmi_detail = random.choice(wmi_details)
|
if mode == "server":
|
||||||
agent.services = services
|
|
||||||
agent.disks = random.choice(disks)
|
|
||||||
if mode == AgentMonType.SERVER:
|
|
||||||
agent.operating_system = random.choice(op_systems_servers)
|
agent.operating_system = random.choice(op_systems_servers)
|
||||||
else:
|
else:
|
||||||
agent.operating_system = random.choice(op_systems_workstations)
|
agent.operating_system = random.choice(op_systems_workstations)
|
||||||
|
|
||||||
agent.version = settings.LATEST_AGENT_VER
|
|
||||||
agent.hostname = random.choice(hostnames)
|
agent.hostname = random.choice(hostnames)
|
||||||
|
agent.version = settings.LATEST_AGENT_VER
|
||||||
agent.site = Site.objects.get(name=site)
|
agent.site = Site.objects.get(name=site)
|
||||||
agent.agent_id = self.rand_string(40)
|
agent.agent_id = self.rand_string(25)
|
||||||
agent.description = random.choice(descriptions)
|
agent.description = random.choice(descriptions)
|
||||||
agent.monitoring_type = mode
|
agent.monitoring_type = mode
|
||||||
agent.public_ip = random.choice(public_ips)
|
agent.public_ip = random.choice(public_ips)
|
||||||
agent.last_seen = django_now
|
agent.last_seen = djangotime.now()
|
||||||
|
agent.plat = "windows"
|
||||||
|
agent.plat_release = "windows-2019Server"
|
||||||
agent.total_ram = random.choice(total_rams)
|
agent.total_ram = random.choice(total_rams)
|
||||||
agent.boot_time = random.choice(boot_times)
|
agent.boot_time = random.choice(boot_times)
|
||||||
agent.logged_in_username = random.choice(user_names)
|
agent.logged_in_username = random.choice(user_names)
|
||||||
@@ -363,20 +303,24 @@ class Command(BaseCommand):
|
|||||||
agent.overdue_email_alert = random.choice([True, False])
|
agent.overdue_email_alert = random.choice([True, False])
|
||||||
agent.overdue_text_alert = random.choice([True, False])
|
agent.overdue_text_alert = random.choice([True, False])
|
||||||
agent.needs_reboot = random.choice([True, False])
|
agent.needs_reboot = random.choice([True, False])
|
||||||
|
agent.wmi_detail = random.choice(wmi_details)
|
||||||
|
agent.services = services
|
||||||
|
agent.disks = random.choice(disks)
|
||||||
|
|
||||||
agent.save()
|
agent.save()
|
||||||
|
|
||||||
if agent.plat == AgentPlat.WINDOWS:
|
|
||||||
InstalledSoftware(agent=agent, software=random.choice(softwares)).save()
|
InstalledSoftware(agent=agent, software=random.choice(softwares)).save()
|
||||||
|
|
||||||
if mode == AgentMonType.WORKSTATION:
|
if mode == "workstation":
|
||||||
WinUpdatePolicy(agent=agent, run_time_days=[5, 6]).save()
|
WinUpdatePolicy(agent=agent, run_time_days=[5, 6]).save()
|
||||||
else:
|
else:
|
||||||
WinUpdatePolicy(agent=agent).save()
|
WinUpdatePolicy(agent=agent).save()
|
||||||
|
|
||||||
if agent.plat == AgentPlat.WINDOWS:
|
|
||||||
# windows updates load
|
# windows updates load
|
||||||
guids = [i for i in windows_updates.keys()]
|
guids = []
|
||||||
|
for k in windows_updates.keys():
|
||||||
|
guids.append(k)
|
||||||
|
|
||||||
for i in guids:
|
for i in guids:
|
||||||
WinUpdate(
|
WinUpdate(
|
||||||
agent=agent,
|
agent=agent,
|
||||||
@@ -392,7 +336,7 @@ class Command(BaseCommand):
|
|||||||
# agent histories
|
# agent histories
|
||||||
hist = AgentHistory()
|
hist = AgentHistory()
|
||||||
hist.agent = agent
|
hist.agent = agent
|
||||||
hist.type = AgentHistoryType.CMD_RUN
|
hist.type = "cmd_run"
|
||||||
hist.command = "ping google.com"
|
hist.command = "ping google.com"
|
||||||
hist.username = "demo"
|
hist.username = "demo"
|
||||||
hist.results = ping_success_output
|
hist.results = ping_success_output
|
||||||
@@ -400,7 +344,7 @@ class Command(BaseCommand):
|
|||||||
|
|
||||||
hist1 = AgentHistory()
|
hist1 = AgentHistory()
|
||||||
hist1.agent = agent
|
hist1.agent = agent
|
||||||
hist1.type = AgentHistoryType.SCRIPT_RUN
|
hist1.type = "script_run"
|
||||||
hist1.script = clear_spool
|
hist1.script = clear_spool
|
||||||
hist1.script_results = {
|
hist1.script_results = {
|
||||||
"id": 1,
|
"id": 1,
|
||||||
@@ -411,11 +355,16 @@ class Command(BaseCommand):
|
|||||||
}
|
}
|
||||||
hist1.save()
|
hist1.save()
|
||||||
|
|
||||||
if agent.plat == AgentPlat.WINDOWS:
|
|
||||||
# disk space check
|
# disk space check
|
||||||
check1 = Check()
|
check1 = Check()
|
||||||
|
check_result1 = CheckResult(assigned_check=check1, agent=agent)
|
||||||
check1.agent = agent
|
check1.agent = agent
|
||||||
check1.check_type = CheckType.DISK_SPACE
|
check1.check_type = "diskspace"
|
||||||
|
check_result1.status = "passing"
|
||||||
|
check_result1.last_run = djangotime.now()
|
||||||
|
check_result1.more_info = "Total: 498.7GB, Free: 287.4GB"
|
||||||
|
check_result1.save()
|
||||||
|
|
||||||
check1.warning_threshold = 25
|
check1.warning_threshold = 25
|
||||||
check1.error_threshold = 10
|
check1.error_threshold = 10
|
||||||
check1.disk = "C:"
|
check1.disk = "C:"
|
||||||
@@ -423,45 +372,33 @@ class Command(BaseCommand):
|
|||||||
check1.text_alert = random.choice([True, False])
|
check1.text_alert = random.choice([True, False])
|
||||||
check1.save()
|
check1.save()
|
||||||
|
|
||||||
check_result1 = CheckResult()
|
|
||||||
check_result1.agent = agent
|
|
||||||
check_result1.assigned_check = check1
|
|
||||||
check_result1.status = CheckStatus.PASSING
|
|
||||||
check_result1.last_run = django_now
|
|
||||||
check_result1.more_info = "Total: 498.7GB, Free: 287.4GB"
|
|
||||||
check_result1.save()
|
|
||||||
|
|
||||||
for i in range(30):
|
for i in range(30):
|
||||||
check1_history = CheckHistory()
|
check1_history = CheckHistory()
|
||||||
check1_history.check_id = check1.pk
|
check1_history.check_id = check1.pk
|
||||||
check1_history.agent_id = agent.agent_id
|
check1_history.agent_id = agent.agent_id
|
||||||
check1_history.x = django_now - djangotime.timedelta(minutes=i * 2)
|
check1_history.x = djangotime.now() - djangotime.timedelta(
|
||||||
|
minutes=i * 2
|
||||||
|
)
|
||||||
check1_history.y = random.randint(13, 40)
|
check1_history.y = random.randint(13, 40)
|
||||||
check1_history.save()
|
check1_history.save()
|
||||||
|
|
||||||
# ping check
|
# ping check
|
||||||
check2 = Check()
|
check2 = Check()
|
||||||
check_result2 = CheckResult()
|
check_result2 = CheckResult(assigned_check=check2, agent=agent)
|
||||||
|
|
||||||
check2.agent = agent
|
check2.agent = agent
|
||||||
check2.check_type = CheckType.PING
|
check2.check_type = "ping"
|
||||||
|
check_result2.last_run = djangotime.now()
|
||||||
check2.email_alert = random.choice([True, False])
|
check2.email_alert = random.choice([True, False])
|
||||||
check2.text_alert = random.choice([True, False])
|
check2.text_alert = random.choice([True, False])
|
||||||
|
|
||||||
check_result2.agent = agent
|
|
||||||
check_result2.assigned_check = check2
|
|
||||||
check_result2.last_run = django_now
|
|
||||||
|
|
||||||
if site in sites5:
|
if site in sites5:
|
||||||
check2.name = "Synology NAS"
|
check2.name = "Synology NAS"
|
||||||
check2.alert_severity = AlertSeverity.ERROR
|
check_result2.status = "failing"
|
||||||
check_result2.status = CheckStatus.FAILING
|
|
||||||
check2.ip = "172.17.14.26"
|
check2.ip = "172.17.14.26"
|
||||||
check_result2.more_info = ping_fail_output
|
check_result2.more_info = ping_fail_output
|
||||||
else:
|
else:
|
||||||
check2.name = "Google"
|
check2.name = "Google"
|
||||||
check_result2.status = CheckStatus.PASSING
|
check_result2.status = "passing"
|
||||||
check2.ip = "8.8.8.8"
|
check2.ip = "8.8.8.8"
|
||||||
check_result2.more_info = ping_success_output
|
check_result2.more_info = ping_success_output
|
||||||
|
|
||||||
@@ -472,7 +409,9 @@ class Command(BaseCommand):
|
|||||||
check2_history = CheckHistory()
|
check2_history = CheckHistory()
|
||||||
check2_history.check_id = check2.pk
|
check2_history.check_id = check2.pk
|
||||||
check2_history.agent_id = agent.agent_id
|
check2_history.agent_id = agent.agent_id
|
||||||
check2_history.x = django_now - djangotime.timedelta(minutes=i * 2)
|
check2_history.x = djangotime.now() - djangotime.timedelta(
|
||||||
|
minutes=i * 2
|
||||||
|
)
|
||||||
if site in sites5:
|
if site in sites5:
|
||||||
check2_history.y = 1
|
check2_history.y = 1
|
||||||
check2_history.results = ping_fail_output
|
check2_history.results = ping_fail_output
|
||||||
@@ -483,19 +422,13 @@ class Command(BaseCommand):
|
|||||||
|
|
||||||
# cpu load check
|
# cpu load check
|
||||||
check3 = Check()
|
check3 = Check()
|
||||||
|
check_result3 = CheckResult(assigned_check=check3, agent=agent)
|
||||||
check3.agent = agent
|
check3.agent = agent
|
||||||
check3.check_type = CheckType.CPU_LOAD
|
check3.check_type = "cpuload"
|
||||||
|
check_result3.status = "passing"
|
||||||
|
check_result3.last_run = djangotime.now()
|
||||||
check3.warning_threshold = 70
|
check3.warning_threshold = 70
|
||||||
check3.error_threshold = 90
|
check3.error_threshold = 90
|
||||||
check3.email_alert = random.choice([True, False])
|
|
||||||
check3.text_alert = random.choice([True, False])
|
|
||||||
check3.save()
|
|
||||||
|
|
||||||
check_result3 = CheckResult()
|
|
||||||
check_result3.agent = agent
|
|
||||||
check_result3.assigned_check = check3
|
|
||||||
check_result3.status = CheckStatus.PASSING
|
|
||||||
check_result3.last_run = django_now
|
|
||||||
check_result3.history = [
|
check_result3.history = [
|
||||||
15,
|
15,
|
||||||
23,
|
23,
|
||||||
@@ -512,116 +445,101 @@ class Command(BaseCommand):
|
|||||||
13,
|
13,
|
||||||
34,
|
34,
|
||||||
]
|
]
|
||||||
|
check3.email_alert = random.choice([True, False])
|
||||||
|
check3.text_alert = random.choice([True, False])
|
||||||
|
check3.save()
|
||||||
check_result3.save()
|
check_result3.save()
|
||||||
|
|
||||||
for i in range(30):
|
for i in range(30):
|
||||||
check3_history = CheckHistory()
|
check3_history = CheckHistory()
|
||||||
check3_history.check_id = check3.pk
|
check3_history.check_id = check3.pk
|
||||||
check3_history.agent_id = agent.agent_id
|
check3_history.agent_id = agent.agent_id
|
||||||
check3_history.x = django_now - djangotime.timedelta(minutes=i * 2)
|
check3_history.x = djangotime.now() - djangotime.timedelta(
|
||||||
|
minutes=i * 2
|
||||||
|
)
|
||||||
check3_history.y = random.randint(2, 79)
|
check3_history.y = random.randint(2, 79)
|
||||||
check3_history.save()
|
check3_history.save()
|
||||||
|
|
||||||
# memory check
|
# memory check
|
||||||
check4 = Check()
|
check4 = Check()
|
||||||
|
check_result4 = CheckResult(assigned_check=check4, agent=agent)
|
||||||
check4.agent = agent
|
check4.agent = agent
|
||||||
check4.check_type = CheckType.MEMORY
|
check4.check_type = "memory"
|
||||||
|
check_result4.status = "passing"
|
||||||
check4.warning_threshold = 70
|
check4.warning_threshold = 70
|
||||||
check4.error_threshold = 85
|
check4.error_threshold = 85
|
||||||
|
check_result4.history = [34, 34, 35, 36, 34, 34, 34, 34, 34, 34]
|
||||||
check4.email_alert = random.choice([True, False])
|
check4.email_alert = random.choice([True, False])
|
||||||
check4.text_alert = random.choice([True, False])
|
check4.text_alert = random.choice([True, False])
|
||||||
check4.save()
|
check4.save()
|
||||||
|
|
||||||
check_result4 = CheckResult()
|
|
||||||
check_result4.agent = agent
|
|
||||||
check_result4.assigned_check = check4
|
|
||||||
check_result4.status = CheckStatus.PASSING
|
|
||||||
check_result4.last_run = django_now
|
|
||||||
check_result4.history = [34, 34, 35, 36, 34, 34, 34, 34, 34, 34]
|
|
||||||
check_result4.save()
|
check_result4.save()
|
||||||
|
|
||||||
for i in range(30):
|
for i in range(30):
|
||||||
check4_history = CheckHistory()
|
check4_history = CheckHistory()
|
||||||
check4_history.check_id = check4.pk
|
check4_history.check_id = check4.pk
|
||||||
check4_history.agent_id = agent.agent_id
|
check4_history.agent_id = agent.agent_id
|
||||||
check4_history.x = django_now - djangotime.timedelta(minutes=i * 2)
|
check4_history.x = djangotime.now() - djangotime.timedelta(
|
||||||
|
minutes=i * 2
|
||||||
|
)
|
||||||
check4_history.y = random.randint(2, 79)
|
check4_history.y = random.randint(2, 79)
|
||||||
check4_history.save()
|
check4_history.save()
|
||||||
|
|
||||||
# script check storage pool
|
# script check storage pool
|
||||||
check5 = Check()
|
check5 = Check()
|
||||||
|
check_result5 = CheckResult(assigned_check=check5, agent=agent)
|
||||||
check5.agent = agent
|
check5.agent = agent
|
||||||
check5.check_type = CheckType.SCRIPT
|
check5.check_type = "script"
|
||||||
|
check_result5.status = "passing"
|
||||||
|
check_result5.last_run = djangotime.now()
|
||||||
check5.email_alert = random.choice([True, False])
|
check5.email_alert = random.choice([True, False])
|
||||||
check5.text_alert = random.choice([True, False])
|
check5.text_alert = random.choice([True, False])
|
||||||
check5.timeout = 120
|
check5.timeout = 120
|
||||||
|
|
||||||
check5.script = check_pool_health
|
|
||||||
check5.save()
|
|
||||||
|
|
||||||
check_result5 = CheckResult()
|
|
||||||
check_result5.agent = agent
|
|
||||||
check_result5.assigned_check = check5
|
|
||||||
check_result5.status = CheckStatus.PASSING
|
|
||||||
check_result5.last_run = django_now
|
|
||||||
check_result5.retcode = 0
|
check_result5.retcode = 0
|
||||||
check_result5.execution_time = "4.0000"
|
check_result5.execution_time = "4.0000"
|
||||||
|
check5.script = check_pool_health
|
||||||
|
check5.save()
|
||||||
check_result5.save()
|
check_result5.save()
|
||||||
|
|
||||||
for i in range(30):
|
for i in range(30):
|
||||||
check5_history = CheckHistory()
|
check5_history = CheckHistory()
|
||||||
check5_history.check_id = check5.pk
|
check5_history.check_id = check5.pk
|
||||||
check5_history.agent_id = agent.agent_id
|
check5_history.agent_id = agent.agent_id
|
||||||
check5_history.x = django_now - djangotime.timedelta(minutes=i * 2)
|
check5_history.x = djangotime.now() - djangotime.timedelta(
|
||||||
|
minutes=i * 2
|
||||||
|
)
|
||||||
if i == 10 or i == 18:
|
if i == 10 or i == 18:
|
||||||
check5_history.y = 1
|
check5_history.y = 1
|
||||||
else:
|
else:
|
||||||
check5_history.y = 0
|
check5_history.y = 0
|
||||||
check5_history.results = {
|
|
||||||
"retcode": 0,
|
|
||||||
"stdout": None,
|
|
||||||
"stderr": None,
|
|
||||||
"execution_time": "4.0000",
|
|
||||||
}
|
|
||||||
check5_history.save()
|
check5_history.save()
|
||||||
|
|
||||||
check6 = Check()
|
check6 = Check()
|
||||||
|
check_result6 = CheckResult(assigned_check=check6, agent=agent)
|
||||||
check6.agent = agent
|
check6.agent = agent
|
||||||
check6.check_type = CheckType.SCRIPT
|
check6.check_type = "script"
|
||||||
|
check_result6.status = "passing"
|
||||||
|
check_result6.last_run = djangotime.now()
|
||||||
check6.email_alert = random.choice([True, False])
|
check6.email_alert = random.choice([True, False])
|
||||||
check6.text_alert = random.choice([True, False])
|
check6.text_alert = random.choice([True, False])
|
||||||
check6.timeout = 120
|
check6.timeout = 120
|
||||||
check6.script = check_net_aware
|
|
||||||
check6.save()
|
|
||||||
|
|
||||||
check_result6 = CheckResult()
|
|
||||||
check_result6.agent = agent
|
|
||||||
check_result6.assigned_check = check6
|
|
||||||
check_result6.status = CheckStatus.PASSING
|
|
||||||
check_result6.last_run = django_now
|
|
||||||
check_result6.retcode = 0
|
check_result6.retcode = 0
|
||||||
check_result6.execution_time = "4.0000"
|
check_result6.execution_time = "4.0000"
|
||||||
|
check6.script = check_net_aware
|
||||||
|
check6.save()
|
||||||
check_result6.save()
|
check_result6.save()
|
||||||
|
|
||||||
for i in range(30):
|
for i in range(30):
|
||||||
check6_history = CheckHistory()
|
check6_history = CheckHistory()
|
||||||
check6_history.check_id = check6.pk
|
check6_history.check_id = check6.pk
|
||||||
check6_history.agent_id = agent.agent_id
|
check6_history.agent_id = agent.agent_id
|
||||||
check6_history.x = django_now - djangotime.timedelta(minutes=i * 2)
|
check6_history.x = djangotime.now() - djangotime.timedelta(
|
||||||
|
minutes=i * 2
|
||||||
|
)
|
||||||
check6_history.y = 0
|
check6_history.y = 0
|
||||||
check6_history.results = {
|
|
||||||
"retcode": 0,
|
|
||||||
"stdout": None,
|
|
||||||
"stderr": None,
|
|
||||||
"execution_time": "4.0000",
|
|
||||||
}
|
|
||||||
check6_history.save()
|
check6_history.save()
|
||||||
|
|
||||||
nla_task = AutomatedTask()
|
nla_task = AutomatedTask()
|
||||||
|
nla_task_result = TaskResult(task=nla_task, agent=agent)
|
||||||
nla_task.agent = agent
|
nla_task.agent = agent
|
||||||
actions = [
|
actions = [
|
||||||
{
|
{
|
||||||
@@ -635,21 +553,17 @@ class Command(BaseCommand):
|
|||||||
nla_task.actions = actions
|
nla_task.actions = actions
|
||||||
nla_task.assigned_check = check6
|
nla_task.assigned_check = check6
|
||||||
nla_task.name = "Restart NLA"
|
nla_task.name = "Restart NLA"
|
||||||
nla_task.task_type = TaskType.CHECK_FAILURE
|
nla_task.task_type = "checkfailure"
|
||||||
nla_task.save()
|
|
||||||
|
|
||||||
nla_task_result = TaskResult()
|
|
||||||
nla_task_result.task = nla_task
|
|
||||||
nla_task_result.agent = agent
|
|
||||||
nla_task_result.execution_time = "1.8443"
|
nla_task_result.execution_time = "1.8443"
|
||||||
nla_task_result.last_run = django_now
|
nla_task_result.last_run = djangotime.now()
|
||||||
nla_task_result.stdout = "no stdout"
|
nla_task_result.stdout = "no stdout"
|
||||||
nla_task_result.retcode = 0
|
nla_task_result.retcode = 0
|
||||||
nla_task_result.sync_status = TaskSyncStatus.SYNCED
|
nla_task_result.sync_status = "synced"
|
||||||
|
nla_task.save()
|
||||||
nla_task_result.save()
|
nla_task_result.save()
|
||||||
|
|
||||||
spool_task = AutomatedTask()
|
spool_task = AutomatedTask()
|
||||||
|
spool_task_result = TaskResult(task=spool_task, agent=agent)
|
||||||
spool_task.agent = agent
|
spool_task.agent = agent
|
||||||
actions = [
|
actions = [
|
||||||
{
|
{
|
||||||
@@ -662,26 +576,25 @@ class Command(BaseCommand):
|
|||||||
]
|
]
|
||||||
spool_task.actions = actions
|
spool_task.actions = actions
|
||||||
spool_task.name = "Clear the print spooler"
|
spool_task.name = "Clear the print spooler"
|
||||||
spool_task.task_type = TaskType.DAILY
|
spool_task.task_type = "daily"
|
||||||
spool_task.run_time_date = django_now + djangotime.timedelta(minutes=10)
|
spool_task.run_time_date = djangotime.now() + djangotime.timedelta(
|
||||||
spool_task.expire_date = django_now + djangotime.timedelta(days=753)
|
minutes=10
|
||||||
|
)
|
||||||
|
spool_task.expire_date = djangotime.now() + djangotime.timedelta(days=753)
|
||||||
spool_task.daily_interval = 1
|
spool_task.daily_interval = 1
|
||||||
spool_task.weekly_interval = 1
|
spool_task.weekly_interval = 1
|
||||||
spool_task.task_repetition_duration = "2h"
|
spool_task.task_repetition_duration = "2h"
|
||||||
spool_task.task_repetition_interval = "25m"
|
spool_task.task_repetition_interval = "25m"
|
||||||
spool_task.random_task_delay = "3m"
|
spool_task.random_task_delay = "3m"
|
||||||
spool_task.save()
|
spool_task_result.last_run = djangotime.now()
|
||||||
|
|
||||||
spool_task_result = TaskResult()
|
|
||||||
spool_task_result.task = spool_task
|
|
||||||
spool_task_result.agent = agent
|
|
||||||
spool_task_result.last_run = django_now
|
|
||||||
spool_task_result.retcode = 0
|
spool_task_result.retcode = 0
|
||||||
spool_task_result.stdout = spooler_stdout
|
spool_task_result.stdout = spooler_stdout
|
||||||
spool_task_result.sync_status = TaskSyncStatus.SYNCED
|
spool_task_result.sync_status = "synced"
|
||||||
|
spool_task.save()
|
||||||
spool_task_result.save()
|
spool_task_result.save()
|
||||||
|
|
||||||
tmp_dir_task = AutomatedTask()
|
tmp_dir_task = AutomatedTask()
|
||||||
|
tmp_dir_task_result = TaskResult(task=tmp_dir_task, agent=agent)
|
||||||
tmp_dir_task.agent = agent
|
tmp_dir_task.agent = agent
|
||||||
tmp_dir_task.name = "show temp dir files"
|
tmp_dir_task.name = "show temp dir files"
|
||||||
actions = [
|
actions = [
|
||||||
@@ -694,81 +607,64 @@ class Command(BaseCommand):
|
|||||||
}
|
}
|
||||||
]
|
]
|
||||||
tmp_dir_task.actions = actions
|
tmp_dir_task.actions = actions
|
||||||
tmp_dir_task.task_type = TaskType.MANUAL
|
tmp_dir_task.task_type = "manual"
|
||||||
tmp_dir_task.save()
|
tmp_dir_task_result.last_run = djangotime.now()
|
||||||
|
|
||||||
tmp_dir_task_result = TaskResult()
|
|
||||||
tmp_dir_task_result.task = tmp_dir_task
|
|
||||||
tmp_dir_task_result.agent = agent
|
|
||||||
tmp_dir_task_result.last_run = django_now
|
|
||||||
tmp_dir_task_result.stdout = temp_dir_stdout
|
tmp_dir_task_result.stdout = temp_dir_stdout
|
||||||
tmp_dir_task_result.retcode = 0
|
             tmp_dir_task_result.retcode = 0
-            tmp_dir_task_result.sync_status = TaskSyncStatus.SYNCED
+            tmp_dir_task_result.sync_status = "synced"
+            tmp_dir_task.save()
             tmp_dir_task_result.save()
 
             check7 = Check()
+            check_result7 = CheckResult(assigned_check=check7, agent=agent)
             check7.agent = agent
-            check7.check_type = CheckType.SCRIPT
+            check7.check_type = "script"
+            check_result7.status = "passing"
+            check_result7.last_run = djangotime.now()
             check7.email_alert = random.choice([True, False])
             check7.text_alert = random.choice([True, False])
             check7.timeout = 120
 
-            check7.script = clear_spool
-
-            check7.save()
-
-            check_result7 = CheckResult()
-            check_result7.assigned_check = check7
-            check_result7.agent = agent
-            check_result7.status = CheckStatus.PASSING
-            check_result7.last_run = django_now
             check_result7.retcode = 0
             check_result7.execution_time = "3.1337"
+            check7.script = clear_spool
             check_result7.stdout = spooler_stdout
+            check7.save()
             check_result7.save()
 
             for i in range(30):
                 check7_history = CheckHistory()
                 check7_history.check_id = check7.pk
                 check7_history.agent_id = agent.agent_id
-                check7_history.x = django_now - djangotime.timedelta(minutes=i * 2)
+                check7_history.x = djangotime.now() - djangotime.timedelta(
+                    minutes=i * 2
+                )
                 check7_history.y = 0
-                check7_history.results = {
-                    "retcode": 0,
-                    "stdout": spooler_stdout,
-                    "stderr": None,
-                    "execution_time": "3.1337",
-                }
                 check7_history.save()
 
-            if agent.plat == AgentPlat.WINDOWS:
             check8 = Check()
+            check_result8 = CheckResult(assigned_check=check8, agent=agent)
             check8.agent = agent
-            check8.check_type = CheckType.WINSVC
+            check8.check_type = "winsvc"
+            check_result8.status = "passing"
+            check_result8.last_run = djangotime.now()
             check8.email_alert = random.choice([True, False])
             check8.text_alert = random.choice([True, False])
+            check_result8.more_info = "Status RUNNING"
             check8.fails_b4_alert = 4
             check8.svc_name = "Spooler"
             check8.svc_display_name = "Print Spooler"
             check8.pass_if_start_pending = False
             check8.restart_if_stopped = True
             check8.save()
 
-            check_result8 = CheckResult()
-            check_result8.assigned_check = check8
-            check_result8.agent = agent
-            check_result8.status = CheckStatus.PASSING
-            check_result8.last_run = django_now
-            check_result8.more_info = "Status RUNNING"
             check_result8.save()
 
             for i in range(30):
                 check8_history = CheckHistory()
                 check8_history.check_id = check8.pk
                 check8_history.agent_id = agent.agent_id
-                check8_history.x = django_now - djangotime.timedelta(minutes=i * 2)
+                check8_history.x = djangotime.now() - djangotime.timedelta(
+                    minutes=i * 2
+                )
                 if i == 10 or i == 18:
                     check8_history.y = 1
                     check8_history.results = "Status STOPPED"
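The enum-versus-string pairs in this hunk write identical database values, because `CheckType`, `CheckStatus` and the other names from `tacticalrmm.constants` are Django `TextChoices` enums whose members behave like their underlying strings. A minimal sketch, not TRMM's actual constants module; the member names and raw values come from the hunk above, the labels are invented for illustration:

```python
# Sketch only: TextChoices members are str subclasses, so the enum form and the
# bare string form store the same value in the check_type column.
from django.db import models


class CheckType(models.TextChoices):
    SCRIPT = "script", "Script"        # labels here are illustrative
    WINSVC = "winsvc", "Service"
    EVENT_LOG = "eventlog", "Event Log"


assert CheckType.SCRIPT == "script"        # member compares equal to its raw value
assert CheckType.SCRIPT.value == "script"  # .value is the literal stored in the DB
```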
@@ -778,29 +674,28 @@ class Command(BaseCommand):
                 check8_history.save()
 
             check9 = Check()
+            check_result9 = CheckResult(assigned_check=check9, agent=agent)
             check9.agent = agent
-            check9.check_type = CheckType.EVENT_LOG
+            check9.check_type = "eventlog"
             check9.name = "unexpected shutdown"
 
+            check_result9.last_run = djangotime.now()
             check9.email_alert = random.choice([True, False])
             check9.text_alert = random.choice([True, False])
             check9.fails_b4_alert = 2
-            check9.log_name = EvtLogNames.APPLICATION
-            check9.event_id = 1001
-            check9.event_type = EvtLogTypes.INFO
-            check9.fail_when = EvtLogFailWhen.CONTAINS
-            check9.search_last_days = 30
-
-            check_result9 = CheckResult()
-            check_result9.agent = agent
-            check_result9.assigned_check = check9
-
-            check_result9.last_run = django_now
             if site in sites5:
                 check_result9.extra_details = eventlog_check_fail_data
-                check_result9.status = CheckStatus.FAILING
+                check_result9.status = "failing"
             else:
                 check_result9.extra_details = {"log": []}
-                check_result9.status = CheckStatus.PASSING
+                check_result9.status = "passing"
 
+            check9.log_name = "Application"
+            check9.event_id = 1001
+            check9.event_type = "INFO"
+            check9.fail_when = "contains"
+            check9.search_last_days = 30
 
             check9.save()
             check_result9.save()
@@ -809,7 +704,9 @@ class Command(BaseCommand):
                 check9_history = CheckHistory()
                 check9_history.check_id = check9.pk
                 check9_history.agent_id = agent.agent_id
-                check9_history.x = django_now - djangotime.timedelta(minutes=i * 2)
+                check9_history.x = djangotime.now() - djangotime.timedelta(
+                    minutes=i * 2
+                )
                 if i == 10 or i == 18:
                     check9_history.y = 1
                     check9_history.results = "Events Found: 16"
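The only behavioural difference in these history loops is when the timestamp is taken: one side reuses a single `django_now` captured before the loop, the other calls `djangotime.now()` on every iteration. A small sketch of the two styles, assuming `django_now` was assigned earlier in `handle()` (that assignment is not part of this hunk) and that the snippet runs inside a configured Django project:

```python
# Sketch: contrast of the two timestamp styles seen in the hunks above.
from django.utils import timezone as djangotime

django_now = djangotime.now()  # captured once, shared by every history row

xs_shared_base = [django_now - djangotime.timedelta(minutes=i * 2) for i in range(30)]
xs_per_call = [
    djangotime.now() - djangotime.timedelta(minutes=i * 2) for i in range(30)
]
# xs_shared_base is exactly evenly spaced; xs_per_call drifts by however long
# each iteration takes, which is harmless for demo data but not identical.
```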
@@ -821,7 +718,8 @@ class Command(BaseCommand):
             pick = random.randint(1, 10)
 
             if pick == 5 or pick == 3:
-                reboot_time = django_now + djangotime.timedelta(
+                reboot_time = djangotime.now() + djangotime.timedelta(
                     minutes=random.randint(1000, 500000)
                 )
                 date_obj = dt.datetime.strftime(reboot_time, "%Y-%m-%d %H:%M")
@@ -834,7 +732,7 @@ class Command(BaseCommand):
 
                 sched_reboot = PendingAction()
                 sched_reboot.agent = agent
-                sched_reboot.action_type = PAAction.SCHED_REBOOT
+                sched_reboot.action_type = "schedreboot"
                 sched_reboot.details = {
                     "time": str(obj),
                     "taskname": task_name,
@@ -843,4 +741,5 @@ class Command(BaseCommand):
 
             self.stdout.write(self.style.SUCCESS(f"Added agent # {count_agents + 1}"))
 
+        call_command("load_demo_scripts")
         self.stdout.write("done")
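The added line chains into another management command. Django's `call_command` runs a command in-process, the same way `manage.py load_demo_scripts` would from the shell; a short sketch in which the wrapper class is illustrative and only the command name comes from the diff:

```python
# Illustrative wrapper showing how one management command invokes another.
from django.core.management import call_command
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = "Seed demo data, then report completion"

    def handle(self, *args, **kwargs):
        call_command("load_demo_scripts")  # runs the other command in-process
        self.stdout.write("done")
```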
@@ -1,30 +0,0 @@
-from django.core.management.base import BaseCommand
-
-from agents.models import Agent
-from tacticalrmm.constants import AGENT_DEFER
-
-
-class Command(BaseCommand):
-    help = "Find all agents that have a certain service installed"
-
-    def add_arguments(self, parser):
-        parser.add_argument("name", type=str)
-
-    def handle(self, *args, **kwargs):
-        search = kwargs["name"].lower()
-
-        agents = Agent.objects.defer(*AGENT_DEFER)
-        for agent in agents:
-            try:
-                for svc in agent.services:
-                    if (
-                        search in svc["name"].lower()
-                        or search in svc["display_name"].lower()
-                    ):
-                        self.stdout.write(
-                            self.style.SUCCESS(
-                                f"{agent.hostname} - {svc['name']} ({svc['display_name']}) - {svc['status']}"
-                            )
-                        )
-            except:
-                continue
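The command removed above scans `agent.services` (a list of dicts) and silences every error with a bare `except`. A hedged sketch of the same search written as a plain helper with explicit key handling; `Agent` and `AGENT_DEFER` are the imports from the deleted file, while `matches()` and `find_service()` are hypothetical names:

```python
# Sketch only: same service-name search as the deleted command, without the
# bare except. Assumes agent.services is a list of dicts with "name" and
# "display_name" keys, as the deleted code does.
from agents.models import Agent
from tacticalrmm.constants import AGENT_DEFER


def matches(svc: dict, search: str) -> bool:
    name = (svc.get("name") or "").lower()
    display = (svc.get("display_name") or "").lower()
    return search in name or search in display


def find_service(search: str) -> list[str]:
    search = search.lower()
    hits = []
    for agent in Agent.objects.defer(*AGENT_DEFER):
        for svc in agent.services or []:
            if matches(svc, search):
                hits.append(f"{agent.hostname} - {svc['name']} ({svc['display_name']})")
    return hits
```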
@@ -1,24 +0,0 @@
-from django.core.management.base import BaseCommand
-
-from agents.models import Agent
-from tacticalrmm.constants import AGENT_DEFER
-
-
-class Command(BaseCommand):
-    def find_duplicates(self, lst):
-        return list(set([item for item in lst if lst.count(item) > 1]))
-
-    def handle(self, *args, **kwargs):
-        for agent in Agent.objects.defer(*AGENT_DEFER).prefetch_related(
-            "custom_fields__field"
-        ):
-            if dupes := self.find_duplicates(
-                [i.field.name for i in agent.custom_fields.all()]
-            ):
-                for dupe in dupes:
-                    cf = list(
-                        agent.custom_fields.filter(field__name=dupe).order_by("id")
-                    )
-                    to_delete = cf[:-1]
-                    for i in to_delete:
-                        i.delete()
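The removed cleanup command finds custom field names that occur more than once per agent and deletes all but the newest row (`cf[:-1]` after ordering by `id`). Its `find_duplicates` helper is quadratic because of `lst.count()`; a standalone sketch of an equivalent linear version with a tiny usage example:

```python
# Sketch: same duplicate detection as the deleted command, using Counter
# instead of an O(n^2) list.count() scan. Output order follows first appearance.
from collections import Counter


def find_duplicates(lst: list[str]) -> list[str]:
    return [item for item, n in Counter(lst).items() if n > 1]


names = ["serial", "warranty", "serial", "location", "warranty", "serial"]
print(find_duplicates(names))  # ['serial', 'warranty']
```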
@@ -1,17 +1,16 @@
+from agents.models import Agent
 from django.conf import settings
 from django.core.management.base import BaseCommand
 
-from agents.models import Agent
-from tacticalrmm.constants import AGENT_STATUS_ONLINE, ONLINE_AGENTS
-
 
 class Command(BaseCommand):
     help = "Shows online agents that are not on the latest version"
 
     def handle(self, *args, **kwargs):
-        only = ONLINE_AGENTS + ("hostname",)
-        q = Agent.objects.exclude(version=settings.LATEST_AGENT_VER).only(*only)
-        agents = [i for i in q if i.status == AGENT_STATUS_ONLINE]
+        q = Agent.objects.exclude(version=settings.LATEST_AGENT_VER).only(
+            "pk", "version", "last_seen", "overdue_time", "offline_time"
+        )
+        agents = [i for i in q if i.status == "online"]
         for agent in agents:
             self.stdout.write(
                 self.style.SUCCESS(f"{agent.hostname} - v{agent.version}")
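Both versions of this command fetch only the handful of columns needed to compute an agent's status and then filter in Python, since `status` is computed per agent rather than stored as a database column. A sketch of that pattern; the field names are the ones spelled out on the new side of the hunk, and `ONLINE_AGENTS` on the old side is assumed to be a similar tuple of column names:

```python
# Sketch of the narrow-query pattern used by both sides of the hunk above.
from agents.models import Agent
from django.conf import settings

fields = ("pk", "version", "last_seen", "overdue_time", "offline_time", "hostname")
outdated = [
    agent
    for agent in Agent.objects.exclude(version=settings.LATEST_AGENT_VER).only(*fields)
    if agent.status == "online"  # status is computed per-agent, so filter in Python
]
```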
@@ -1,10 +1,10 @@
+from agents.models import Agent
+from agents.tasks import send_agent_update_task
+from core.utils import get_core_settings
 from django.conf import settings
 from django.core.management.base import BaseCommand
 from packaging import version as pyver
 
-from agents.models import Agent
-from agents.tasks import send_agent_update_task
-from core.utils import get_core_settings, token_is_valid
 from tacticalrmm.constants import AGENT_DEFER
 
 
@@ -22,5 +22,4 @@ class Command(BaseCommand):
             for i in q
             if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
         ]
-        token, _ = token_is_valid()
-        send_agent_update_task.delay(agent_ids=agent_ids, token=token, force=False)
+        send_agent_update_task.delay(agent_ids=agent_ids)
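The two sides differ only in how the Celery task is enqueued: the removed lines fetch a token via `token_is_valid()` (presumably the code-signing token) and forward it with `force=False`, the added line enqueues with the agent ids alone. A sketch of the two call shapes, using the names exactly as they appear in the hunk; the internals of the task and of `token_is_valid` are not shown in this diff:

```python
# Sketch of the two enqueue styles from the hunk above; agent_ids is illustrative.
from agents.tasks import send_agent_update_task
from core.utils import token_is_valid

agent_ids = ["agent-1", "agent-2"]

# removed variant: look up a token and pass it through to the task
token, _ = token_is_valid()
send_agent_update_task.delay(agent_ids=agent_ids, token=token, force=False)

# added variant: enqueue with only the agent ids
send_agent_update_task.delay(agent_ids=agent_ids)
```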
@@ -1,7 +1,7 @@
 # Generated by Django 4.0.3 on 2022-04-07 17:28
 
-import django.db.models.deletion
 from django.db import migrations, models
+import django.db.models.deletion
 
 
 class Migration(migrations.Migration):
@@ -1,17 +0,0 @@
-# Generated by Django 4.0.4 on 2022-04-25 06:51
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('agents', '0049_agent_agents_agen_monitor_df8816_idx'),
-    ]
-
-    operations = [
-        migrations.RemoveField(
-            model_name='agent',
-            name='plat_release',
-        ),
-    ]
@@ -1,18 +0,0 @@
-# Generated by Django 4.0.4 on 2022-05-18 03:50
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('agents', '0050_remove_agent_plat_release'),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name='agent',
-            name='plat',
-            field=models.CharField(choices=[('windows', 'Windows'), ('linux', 'Linux'), ('darwin', 'macOS')], default='windows', max_length=255),
-        ),
-    ]
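Each of these small deleted migrations mirrors a one-line change to a field on the `Agent` (or `AgentHistory`) model. For reference, a sketch of the kind of field declaration the 0051 `AlterField` above corresponds to; the snippet is illustrative, not copied from `agents/models.py`, and re-running `python manage.py makemigrations agents` after such a model edit is what produces migrations like these:

```python
# Illustrative field declaration matching the AlterField in the deleted 0051
# migration (choices, default and max_length are taken from the diff).
from django.db import models

plat = models.CharField(
    choices=[("windows", "Windows"), ("linux", "Linux"), ("darwin", "macOS")],
    default="windows",
    max_length=255,
)
```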
@@ -1,18 +0,0 @@
-# Generated by Django 4.0.4 on 2022-05-18 05:28
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('agents', '0051_alter_agent_plat'),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name='agent',
-            name='monitoring_type',
-            field=models.CharField(choices=[('server', 'Server'), ('workstation', 'Workstation')], default='server', max_length=30),
-        ),
-    ]
@@ -1,17 +0,0 @@
-# Generated by Django 4.0.4 on 2022-05-18 06:10
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('agents', '0052_alter_agent_monitoring_type'),
-    ]
-
-    operations = [
-        migrations.RemoveField(
-            model_name='agenthistory',
-            name='status',
-        ),
-    ]
@@ -1,18 +0,0 @@
-# Generated by Django 4.0.4 on 2022-06-06 04:03
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('agents', '0053_remove_agenthistory_status'),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name='agent',
-            name='goarch',
-            field=models.CharField(blank=True, choices=[('amd64', 'amd64'), ('386', '386'), ('arm64', 'arm64'), ('arm', 'arm')], max_length=255, null=True),
-        ),
-    ]
@@ -1,631 +0,0 @@
-# Generated by Django 4.1 on 2022-08-24 07:32
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("agents", "0054_alter_agent_goarch"),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name="agent",
-            name="time_zone",
-            field=models.CharField(
-                blank=True,
-                choices=[
-                    ("Africa/Abidjan", "Africa/Abidjan"),
-                    ("Africa/Accra", "Africa/Accra"),
-                    ("Africa/Addis_Ababa", "Africa/Addis_Ababa"),
[... roughly 590 more auto-generated ("<IANA zone>", "<IANA zone>") choice pairs, running from "Africa/Algiers" through "Zulu", elided ...]
-                    ("Zulu", "Zulu"),
-                ],
-                max_length=255,
-                null=True,
-            ),
-        ),
-    ]
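The deleted 0055 migration freezes the full IANA time zone list into the field's `choices`, which is why it runs to 631 lines. How TRMM builds that list is not shown in this diff; as a hedged sketch, the standard-library `zoneinfo` module can produce equivalent `(value, label)` pairs:

```python
# Sketch: generating (value, label) choice pairs for every installed IANA zone.
# Requires Python 3.9+ and a tz database on the system.
from zoneinfo import available_timezones

TZ_CHOICES = sorted((tz, tz) for tz in available_timezones())

print(len(TZ_CHOICES))  # several hundred zones
print(TZ_CHOICES[0])    # typically ('Africa/Abidjan', 'Africa/Abidjan')
```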
@@ -1,631 +0,0 @@
-# Generated by Django 4.1.7 on 2023-02-28 22:14
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("agents", "0055_alter_agent_time_zone"),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name="agent",
-            name="time_zone",
-            field=models.CharField(
-                blank=True,
-                choices=[
-                    ("Africa/Abidjan", "Africa/Abidjan"),
-                    ("Africa/Accra", "Africa/Accra"),
[... the remaining auto-generated ("<IANA zone>", "<IANA zone>") choice pairs elided; the extracted page breaks off partway through this list, at ("Asia/Tel_Aviv", "Asia/Tel_Aviv") ...]
|
|
||||||
("Asia/Thimbu", "Asia/Thimbu"),
|
|
||||||
("Asia/Thimphu", "Asia/Thimphu"),
|
|
||||||
("Asia/Tokyo", "Asia/Tokyo"),
|
|
||||||
("Asia/Tomsk", "Asia/Tomsk"),
|
|
||||||
("Asia/Ujung_Pandang", "Asia/Ujung_Pandang"),
|
|
||||||
("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"),
|
|
||||||
("Asia/Ulan_Bator", "Asia/Ulan_Bator"),
|
|
||||||
("Asia/Urumqi", "Asia/Urumqi"),
|
|
||||||
("Asia/Ust-Nera", "Asia/Ust-Nera"),
|
|
||||||
("Asia/Vientiane", "Asia/Vientiane"),
|
|
||||||
("Asia/Vladivostok", "Asia/Vladivostok"),
|
|
||||||
("Asia/Yakutsk", "Asia/Yakutsk"),
|
|
||||||
("Asia/Yangon", "Asia/Yangon"),
|
|
||||||
("Asia/Yekaterinburg", "Asia/Yekaterinburg"),
|
|
||||||
("Asia/Yerevan", "Asia/Yerevan"),
|
|
||||||
("Atlantic/Azores", "Atlantic/Azores"),
|
|
||||||
("Atlantic/Bermuda", "Atlantic/Bermuda"),
|
|
||||||
("Atlantic/Canary", "Atlantic/Canary"),
|
|
||||||
("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"),
|
|
||||||
("Atlantic/Faeroe", "Atlantic/Faeroe"),
|
|
||||||
("Atlantic/Faroe", "Atlantic/Faroe"),
|
|
||||||
("Atlantic/Jan_Mayen", "Atlantic/Jan_Mayen"),
|
|
||||||
("Atlantic/Madeira", "Atlantic/Madeira"),
|
|
||||||
("Atlantic/Reykjavik", "Atlantic/Reykjavik"),
|
|
||||||
("Atlantic/South_Georgia", "Atlantic/South_Georgia"),
|
|
||||||
("Atlantic/St_Helena", "Atlantic/St_Helena"),
|
|
||||||
("Atlantic/Stanley", "Atlantic/Stanley"),
|
|
||||||
("Australia/ACT", "Australia/ACT"),
|
|
||||||
("Australia/Adelaide", "Australia/Adelaide"),
|
|
||||||
("Australia/Brisbane", "Australia/Brisbane"),
|
|
||||||
("Australia/Broken_Hill", "Australia/Broken_Hill"),
|
|
||||||
("Australia/Canberra", "Australia/Canberra"),
|
|
||||||
("Australia/Currie", "Australia/Currie"),
|
|
||||||
("Australia/Darwin", "Australia/Darwin"),
|
|
||||||
("Australia/Eucla", "Australia/Eucla"),
|
|
||||||
("Australia/Hobart", "Australia/Hobart"),
|
|
||||||
("Australia/LHI", "Australia/LHI"),
|
|
||||||
("Australia/Lindeman", "Australia/Lindeman"),
|
|
||||||
("Australia/Lord_Howe", "Australia/Lord_Howe"),
|
|
||||||
("Australia/Melbourne", "Australia/Melbourne"),
|
|
||||||
("Australia/NSW", "Australia/NSW"),
|
|
||||||
("Australia/North", "Australia/North"),
|
|
||||||
("Australia/Perth", "Australia/Perth"),
|
|
||||||
("Australia/Queensland", "Australia/Queensland"),
|
|
||||||
("Australia/South", "Australia/South"),
|
|
||||||
("Australia/Sydney", "Australia/Sydney"),
|
|
||||||
("Australia/Tasmania", "Australia/Tasmania"),
|
|
||||||
("Australia/Victoria", "Australia/Victoria"),
|
|
||||||
("Australia/West", "Australia/West"),
|
|
||||||
("Australia/Yancowinna", "Australia/Yancowinna"),
|
|
||||||
("Brazil/Acre", "Brazil/Acre"),
|
|
||||||
("Brazil/DeNoronha", "Brazil/DeNoronha"),
|
|
||||||
("Brazil/East", "Brazil/East"),
|
|
||||||
("Brazil/West", "Brazil/West"),
|
|
||||||
("CET", "CET"),
|
|
||||||
("CST6CDT", "CST6CDT"),
|
|
||||||
("Canada/Atlantic", "Canada/Atlantic"),
|
|
||||||
("Canada/Central", "Canada/Central"),
|
|
||||||
("Canada/Eastern", "Canada/Eastern"),
|
|
||||||
("Canada/Mountain", "Canada/Mountain"),
|
|
||||||
("Canada/Newfoundland", "Canada/Newfoundland"),
|
|
||||||
("Canada/Pacific", "Canada/Pacific"),
|
|
||||||
("Canada/Saskatchewan", "Canada/Saskatchewan"),
|
|
||||||
("Canada/Yukon", "Canada/Yukon"),
|
|
||||||
("Chile/Continental", "Chile/Continental"),
|
|
||||||
("Chile/EasterIsland", "Chile/EasterIsland"),
|
|
||||||
("Cuba", "Cuba"),
|
|
||||||
("EET", "EET"),
|
|
||||||
("EST", "EST"),
|
|
||||||
("EST5EDT", "EST5EDT"),
|
|
||||||
("Egypt", "Egypt"),
|
|
||||||
("Eire", "Eire"),
|
|
||||||
("Etc/GMT", "Etc/GMT"),
|
|
||||||
("Etc/GMT+0", "Etc/GMT+0"),
|
|
||||||
("Etc/GMT+1", "Etc/GMT+1"),
|
|
||||||
("Etc/GMT+10", "Etc/GMT+10"),
|
|
||||||
("Etc/GMT+11", "Etc/GMT+11"),
|
|
||||||
("Etc/GMT+12", "Etc/GMT+12"),
|
|
||||||
("Etc/GMT+2", "Etc/GMT+2"),
|
|
||||||
("Etc/GMT+3", "Etc/GMT+3"),
|
|
||||||
("Etc/GMT+4", "Etc/GMT+4"),
|
|
||||||
("Etc/GMT+5", "Etc/GMT+5"),
|
|
||||||
("Etc/GMT+6", "Etc/GMT+6"),
|
|
||||||
("Etc/GMT+7", "Etc/GMT+7"),
|
|
||||||
("Etc/GMT+8", "Etc/GMT+8"),
|
|
||||||
("Etc/GMT+9", "Etc/GMT+9"),
|
|
||||||
("Etc/GMT-0", "Etc/GMT-0"),
|
|
||||||
("Etc/GMT-1", "Etc/GMT-1"),
|
|
||||||
("Etc/GMT-10", "Etc/GMT-10"),
|
|
||||||
("Etc/GMT-11", "Etc/GMT-11"),
|
|
||||||
("Etc/GMT-12", "Etc/GMT-12"),
|
|
||||||
("Etc/GMT-13", "Etc/GMT-13"),
|
|
||||||
("Etc/GMT-14", "Etc/GMT-14"),
|
|
||||||
("Etc/GMT-2", "Etc/GMT-2"),
|
|
||||||
("Etc/GMT-3", "Etc/GMT-3"),
|
|
||||||
("Etc/GMT-4", "Etc/GMT-4"),
|
|
||||||
("Etc/GMT-5", "Etc/GMT-5"),
|
|
||||||
("Etc/GMT-6", "Etc/GMT-6"),
|
|
||||||
("Etc/GMT-7", "Etc/GMT-7"),
|
|
||||||
("Etc/GMT-8", "Etc/GMT-8"),
|
|
||||||
("Etc/GMT-9", "Etc/GMT-9"),
|
|
||||||
("Etc/GMT0", "Etc/GMT0"),
|
|
||||||
("Etc/Greenwich", "Etc/Greenwich"),
|
|
||||||
("Etc/UCT", "Etc/UCT"),
|
|
||||||
("Etc/UTC", "Etc/UTC"),
|
|
||||||
("Etc/Universal", "Etc/Universal"),
|
|
||||||
("Etc/Zulu", "Etc/Zulu"),
|
|
||||||
("Europe/Amsterdam", "Europe/Amsterdam"),
|
|
||||||
("Europe/Andorra", "Europe/Andorra"),
|
|
||||||
("Europe/Astrakhan", "Europe/Astrakhan"),
|
|
||||||
("Europe/Athens", "Europe/Athens"),
|
|
||||||
("Europe/Belfast", "Europe/Belfast"),
|
|
||||||
("Europe/Belgrade", "Europe/Belgrade"),
|
|
||||||
("Europe/Berlin", "Europe/Berlin"),
|
|
||||||
("Europe/Bratislava", "Europe/Bratislava"),
|
|
||||||
("Europe/Brussels", "Europe/Brussels"),
|
|
||||||
("Europe/Bucharest", "Europe/Bucharest"),
|
|
||||||
("Europe/Budapest", "Europe/Budapest"),
|
|
||||||
("Europe/Busingen", "Europe/Busingen"),
|
|
||||||
("Europe/Chisinau", "Europe/Chisinau"),
|
|
||||||
("Europe/Copenhagen", "Europe/Copenhagen"),
|
|
||||||
("Europe/Dublin", "Europe/Dublin"),
|
|
||||||
("Europe/Gibraltar", "Europe/Gibraltar"),
|
|
||||||
("Europe/Guernsey", "Europe/Guernsey"),
|
|
||||||
("Europe/Helsinki", "Europe/Helsinki"),
|
|
||||||
("Europe/Isle_of_Man", "Europe/Isle_of_Man"),
|
|
||||||
("Europe/Istanbul", "Europe/Istanbul"),
|
|
||||||
("Europe/Jersey", "Europe/Jersey"),
|
|
||||||
("Europe/Kaliningrad", "Europe/Kaliningrad"),
|
|
||||||
("Europe/Kiev", "Europe/Kiev"),
|
|
||||||
("Europe/Kirov", "Europe/Kirov"),
|
|
||||||
("Europe/Kyiv", "Europe/Kyiv"),
|
|
||||||
("Europe/Lisbon", "Europe/Lisbon"),
|
|
||||||
("Europe/Ljubljana", "Europe/Ljubljana"),
|
|
||||||
("Europe/London", "Europe/London"),
|
|
||||||
("Europe/Luxembourg", "Europe/Luxembourg"),
|
|
||||||
("Europe/Madrid", "Europe/Madrid"),
|
|
||||||
("Europe/Malta", "Europe/Malta"),
|
|
||||||
("Europe/Mariehamn", "Europe/Mariehamn"),
|
|
||||||
("Europe/Minsk", "Europe/Minsk"),
|
|
||||||
("Europe/Monaco", "Europe/Monaco"),
|
|
||||||
("Europe/Moscow", "Europe/Moscow"),
|
|
||||||
("Europe/Nicosia", "Europe/Nicosia"),
|
|
||||||
("Europe/Oslo", "Europe/Oslo"),
|
|
||||||
("Europe/Paris", "Europe/Paris"),
|
|
||||||
("Europe/Podgorica", "Europe/Podgorica"),
|
|
||||||
("Europe/Prague", "Europe/Prague"),
|
|
||||||
("Europe/Riga", "Europe/Riga"),
|
|
||||||
("Europe/Rome", "Europe/Rome"),
|
|
||||||
("Europe/Samara", "Europe/Samara"),
|
|
||||||
("Europe/San_Marino", "Europe/San_Marino"),
|
|
||||||
("Europe/Sarajevo", "Europe/Sarajevo"),
|
|
||||||
("Europe/Saratov", "Europe/Saratov"),
|
|
||||||
("Europe/Simferopol", "Europe/Simferopol"),
|
|
||||||
("Europe/Skopje", "Europe/Skopje"),
|
|
||||||
("Europe/Sofia", "Europe/Sofia"),
|
|
||||||
("Europe/Stockholm", "Europe/Stockholm"),
|
|
||||||
("Europe/Tallinn", "Europe/Tallinn"),
|
|
||||||
("Europe/Tirane", "Europe/Tirane"),
|
|
||||||
("Europe/Tiraspol", "Europe/Tiraspol"),
|
|
||||||
("Europe/Ulyanovsk", "Europe/Ulyanovsk"),
|
|
||||||
("Europe/Uzhgorod", "Europe/Uzhgorod"),
|
|
||||||
("Europe/Vaduz", "Europe/Vaduz"),
|
|
||||||
("Europe/Vatican", "Europe/Vatican"),
|
|
||||||
("Europe/Vienna", "Europe/Vienna"),
|
|
||||||
("Europe/Vilnius", "Europe/Vilnius"),
|
|
||||||
("Europe/Volgograd", "Europe/Volgograd"),
|
|
||||||
("Europe/Warsaw", "Europe/Warsaw"),
|
|
||||||
("Europe/Zagreb", "Europe/Zagreb"),
|
|
||||||
("Europe/Zaporozhye", "Europe/Zaporozhye"),
|
|
||||||
("Europe/Zurich", "Europe/Zurich"),
|
|
||||||
("GB", "GB"),
|
|
||||||
("GB-Eire", "GB-Eire"),
|
|
||||||
("GMT", "GMT"),
|
|
||||||
("GMT+0", "GMT+0"),
|
|
||||||
("GMT-0", "GMT-0"),
|
|
||||||
("GMT0", "GMT0"),
|
|
||||||
("Greenwich", "Greenwich"),
|
|
||||||
("HST", "HST"),
|
|
||||||
("Hongkong", "Hongkong"),
|
|
||||||
("Iceland", "Iceland"),
|
|
||||||
("Indian/Antananarivo", "Indian/Antananarivo"),
|
|
||||||
("Indian/Chagos", "Indian/Chagos"),
|
|
||||||
("Indian/Christmas", "Indian/Christmas"),
|
|
||||||
("Indian/Cocos", "Indian/Cocos"),
|
|
||||||
("Indian/Comoro", "Indian/Comoro"),
|
|
||||||
("Indian/Kerguelen", "Indian/Kerguelen"),
|
|
||||||
("Indian/Mahe", "Indian/Mahe"),
|
|
||||||
("Indian/Maldives", "Indian/Maldives"),
|
|
||||||
("Indian/Mauritius", "Indian/Mauritius"),
|
|
||||||
("Indian/Mayotte", "Indian/Mayotte"),
|
|
||||||
("Indian/Reunion", "Indian/Reunion"),
|
|
||||||
("Iran", "Iran"),
|
|
||||||
("Israel", "Israel"),
|
|
||||||
("Jamaica", "Jamaica"),
|
|
||||||
("Japan", "Japan"),
|
|
||||||
("Kwajalein", "Kwajalein"),
|
|
||||||
("Libya", "Libya"),
|
|
||||||
("MET", "MET"),
|
|
||||||
("MST", "MST"),
|
|
||||||
("MST7MDT", "MST7MDT"),
|
|
||||||
("Mexico/BajaNorte", "Mexico/BajaNorte"),
|
|
||||||
("Mexico/BajaSur", "Mexico/BajaSur"),
|
|
||||||
("Mexico/General", "Mexico/General"),
|
|
||||||
("NZ", "NZ"),
|
|
||||||
("NZ-CHAT", "NZ-CHAT"),
|
|
||||||
("Navajo", "Navajo"),
|
|
||||||
("PRC", "PRC"),
|
|
||||||
("PST8PDT", "PST8PDT"),
|
|
||||||
("Pacific/Apia", "Pacific/Apia"),
|
|
||||||
("Pacific/Auckland", "Pacific/Auckland"),
|
|
||||||
("Pacific/Bougainville", "Pacific/Bougainville"),
|
|
||||||
("Pacific/Chatham", "Pacific/Chatham"),
|
|
||||||
("Pacific/Chuuk", "Pacific/Chuuk"),
|
|
||||||
("Pacific/Easter", "Pacific/Easter"),
|
|
||||||
("Pacific/Efate", "Pacific/Efate"),
|
|
||||||
("Pacific/Enderbury", "Pacific/Enderbury"),
|
|
||||||
("Pacific/Fakaofo", "Pacific/Fakaofo"),
|
|
||||||
("Pacific/Fiji", "Pacific/Fiji"),
|
|
||||||
("Pacific/Funafuti", "Pacific/Funafuti"),
|
|
||||||
("Pacific/Galapagos", "Pacific/Galapagos"),
|
|
||||||
("Pacific/Gambier", "Pacific/Gambier"),
|
|
||||||
("Pacific/Guadalcanal", "Pacific/Guadalcanal"),
|
|
||||||
("Pacific/Guam", "Pacific/Guam"),
|
|
||||||
("Pacific/Honolulu", "Pacific/Honolulu"),
|
|
||||||
("Pacific/Johnston", "Pacific/Johnston"),
|
|
||||||
("Pacific/Kanton", "Pacific/Kanton"),
|
|
||||||
("Pacific/Kiritimati", "Pacific/Kiritimati"),
|
|
||||||
("Pacific/Kosrae", "Pacific/Kosrae"),
|
|
||||||
("Pacific/Kwajalein", "Pacific/Kwajalein"),
|
|
||||||
("Pacific/Majuro", "Pacific/Majuro"),
|
|
||||||
("Pacific/Marquesas", "Pacific/Marquesas"),
|
|
||||||
("Pacific/Midway", "Pacific/Midway"),
|
|
||||||
("Pacific/Nauru", "Pacific/Nauru"),
|
|
||||||
("Pacific/Niue", "Pacific/Niue"),
|
|
||||||
("Pacific/Norfolk", "Pacific/Norfolk"),
|
|
||||||
("Pacific/Noumea", "Pacific/Noumea"),
|
|
||||||
("Pacific/Pago_Pago", "Pacific/Pago_Pago"),
|
|
||||||
("Pacific/Palau", "Pacific/Palau"),
|
|
||||||
("Pacific/Pitcairn", "Pacific/Pitcairn"),
|
|
||||||
("Pacific/Pohnpei", "Pacific/Pohnpei"),
|
|
||||||
("Pacific/Ponape", "Pacific/Ponape"),
|
|
||||||
("Pacific/Port_Moresby", "Pacific/Port_Moresby"),
|
|
||||||
("Pacific/Rarotonga", "Pacific/Rarotonga"),
|
|
||||||
("Pacific/Saipan", "Pacific/Saipan"),
|
|
||||||
("Pacific/Samoa", "Pacific/Samoa"),
|
|
||||||
("Pacific/Tahiti", "Pacific/Tahiti"),
|
|
||||||
("Pacific/Tarawa", "Pacific/Tarawa"),
|
|
||||||
("Pacific/Tongatapu", "Pacific/Tongatapu"),
|
|
||||||
("Pacific/Truk", "Pacific/Truk"),
|
|
||||||
("Pacific/Wake", "Pacific/Wake"),
|
|
||||||
("Pacific/Wallis", "Pacific/Wallis"),
|
|
||||||
("Pacific/Yap", "Pacific/Yap"),
|
|
||||||
("Poland", "Poland"),
|
|
||||||
("Portugal", "Portugal"),
|
|
||||||
("ROC", "ROC"),
|
|
||||||
("ROK", "ROK"),
|
|
||||||
("Singapore", "Singapore"),
|
|
||||||
("Turkey", "Turkey"),
|
|
||||||
("UCT", "UCT"),
|
|
||||||
("US/Alaska", "US/Alaska"),
|
|
||||||
("US/Aleutian", "US/Aleutian"),
|
|
||||||
("US/Arizona", "US/Arizona"),
|
|
||||||
("US/Central", "US/Central"),
|
|
||||||
("US/East-Indiana", "US/East-Indiana"),
|
|
||||||
("US/Eastern", "US/Eastern"),
|
|
||||||
("US/Hawaii", "US/Hawaii"),
|
|
||||||
("US/Indiana-Starke", "US/Indiana-Starke"),
|
|
||||||
("US/Michigan", "US/Michigan"),
|
|
||||||
("US/Mountain", "US/Mountain"),
|
|
||||||
("US/Pacific", "US/Pacific"),
|
|
||||||
("US/Samoa", "US/Samoa"),
|
|
||||||
("UTC", "UTC"),
|
|
||||||
("Universal", "Universal"),
|
|
||||||
("W-SU", "W-SU"),
|
|
||||||
("WET", "WET"),
|
|
||||||
("Zulu", "Zulu"),
|
|
||||||
],
|
|
||||||
max_length=255,
|
|
||||||
null=True,
|
|
||||||
),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,17 +0,0 @@
# Generated by Django 4.2.3 on 2023-07-18 01:15

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("core", "0037_coresettings_open_ai_model_and_more"),
        ("agents", "0056_alter_agent_time_zone"),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name="agentcustomfield",
            unique_together={("agent", "field")},
        ),
    ]
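The removed migration above enforces one stored value per (agent, field) pair on agentcustomfield. As a rough illustration of what that constraint looks like when declared on the model itself, here is a minimal sketch; apart from the agent/field names and the core custom-field target taken from these migrations, the model shape and on_delete choices are assumptions, not code from this repository.

# Illustrative sketch only, not the project's actual model definition.
from django.db import models


class AgentCustomField(models.Model):
    agent = models.ForeignKey("agents.Agent", on_delete=models.CASCADE)
    field = models.ForeignKey("core.CustomField", on_delete=models.CASCADE)
    value = models.TextField(blank=True, default="")

    class Meta:
        app_label = "agents"  # illustrative
        # one stored value per (agent, custom field) pair, matching
        # the AlterUniqueTogether operation above
        unique_together = (("agent", "field"),)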
@@ -1,633 +0,0 @@
|
|||||||
# Generated by Django 4.2.7 on 2023-11-09 19:56
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
dependencies = [
|
|
||||||
("agents", "0057_alter_agentcustomfield_unique_together"),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name="agent",
|
|
||||||
name="time_zone",
|
|
||||||
field=models.CharField(
|
|
||||||
blank=True,
|
|
||||||
choices=[
|
|
||||||
("Africa/Abidjan", "Africa/Abidjan"),
|
|
||||||
("Africa/Accra", "Africa/Accra"),
|
|
||||||
("Africa/Addis_Ababa", "Africa/Addis_Ababa"),
|
|
||||||
("Africa/Algiers", "Africa/Algiers"),
|
|
||||||
("Africa/Asmara", "Africa/Asmara"),
|
|
||||||
("Africa/Asmera", "Africa/Asmera"),
|
|
||||||
("Africa/Bamako", "Africa/Bamako"),
|
|
||||||
("Africa/Bangui", "Africa/Bangui"),
|
|
||||||
("Africa/Banjul", "Africa/Banjul"),
|
|
||||||
("Africa/Bissau", "Africa/Bissau"),
|
|
||||||
("Africa/Blantyre", "Africa/Blantyre"),
|
|
||||||
("Africa/Brazzaville", "Africa/Brazzaville"),
|
|
||||||
("Africa/Bujumbura", "Africa/Bujumbura"),
|
|
||||||
("Africa/Cairo", "Africa/Cairo"),
|
|
||||||
("Africa/Casablanca", "Africa/Casablanca"),
|
|
||||||
("Africa/Ceuta", "Africa/Ceuta"),
|
|
||||||
("Africa/Conakry", "Africa/Conakry"),
|
|
||||||
("Africa/Dakar", "Africa/Dakar"),
|
|
||||||
("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"),
|
|
||||||
("Africa/Djibouti", "Africa/Djibouti"),
|
|
||||||
("Africa/Douala", "Africa/Douala"),
|
|
||||||
("Africa/El_Aaiun", "Africa/El_Aaiun"),
|
|
||||||
("Africa/Freetown", "Africa/Freetown"),
|
|
||||||
("Africa/Gaborone", "Africa/Gaborone"),
|
|
||||||
("Africa/Harare", "Africa/Harare"),
|
|
||||||
("Africa/Johannesburg", "Africa/Johannesburg"),
|
|
||||||
("Africa/Juba", "Africa/Juba"),
|
|
||||||
("Africa/Kampala", "Africa/Kampala"),
|
|
||||||
("Africa/Khartoum", "Africa/Khartoum"),
|
|
||||||
("Africa/Kigali", "Africa/Kigali"),
|
|
||||||
("Africa/Kinshasa", "Africa/Kinshasa"),
|
|
||||||
("Africa/Lagos", "Africa/Lagos"),
|
|
||||||
("Africa/Libreville", "Africa/Libreville"),
|
|
||||||
("Africa/Lome", "Africa/Lome"),
|
|
||||||
("Africa/Luanda", "Africa/Luanda"),
|
|
||||||
("Africa/Lubumbashi", "Africa/Lubumbashi"),
|
|
||||||
("Africa/Lusaka", "Africa/Lusaka"),
|
|
||||||
("Africa/Malabo", "Africa/Malabo"),
|
|
||||||
("Africa/Maputo", "Africa/Maputo"),
|
|
||||||
("Africa/Maseru", "Africa/Maseru"),
|
|
||||||
("Africa/Mbabane", "Africa/Mbabane"),
|
|
||||||
("Africa/Mogadishu", "Africa/Mogadishu"),
|
|
||||||
("Africa/Monrovia", "Africa/Monrovia"),
|
|
||||||
("Africa/Nairobi", "Africa/Nairobi"),
|
|
||||||
("Africa/Ndjamena", "Africa/Ndjamena"),
|
|
||||||
("Africa/Niamey", "Africa/Niamey"),
|
|
||||||
("Africa/Nouakchott", "Africa/Nouakchott"),
|
|
||||||
("Africa/Ouagadougou", "Africa/Ouagadougou"),
|
|
||||||
("Africa/Porto-Novo", "Africa/Porto-Novo"),
|
|
||||||
("Africa/Sao_Tome", "Africa/Sao_Tome"),
|
|
||||||
("Africa/Timbuktu", "Africa/Timbuktu"),
|
|
||||||
("Africa/Tripoli", "Africa/Tripoli"),
|
|
||||||
("Africa/Tunis", "Africa/Tunis"),
|
|
||||||
("Africa/Windhoek", "Africa/Windhoek"),
|
|
||||||
("America/Adak", "America/Adak"),
|
|
||||||
("America/Anchorage", "America/Anchorage"),
|
|
||||||
("America/Anguilla", "America/Anguilla"),
|
|
||||||
("America/Antigua", "America/Antigua"),
|
|
||||||
("America/Araguaina", "America/Araguaina"),
|
|
||||||
(
|
|
||||||
"America/Argentina/Buenos_Aires",
|
|
||||||
"America/Argentina/Buenos_Aires",
|
|
||||||
),
|
|
||||||
("America/Argentina/Catamarca", "America/Argentina/Catamarca"),
|
|
||||||
(
|
|
||||||
"America/Argentina/ComodRivadavia",
|
|
||||||
"America/Argentina/ComodRivadavia",
|
|
||||||
),
|
|
||||||
("America/Argentina/Cordoba", "America/Argentina/Cordoba"),
|
|
||||||
("America/Argentina/Jujuy", "America/Argentina/Jujuy"),
|
|
||||||
("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"),
|
|
||||||
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
|
|
||||||
(
|
|
||||||
"America/Argentina/Rio_Gallegos",
|
|
||||||
"America/Argentina/Rio_Gallegos",
|
|
||||||
),
|
|
||||||
("America/Argentina/Salta", "America/Argentina/Salta"),
|
|
||||||
("America/Argentina/San_Juan", "America/Argentina/San_Juan"),
|
|
||||||
("America/Argentina/San_Luis", "America/Argentina/San_Luis"),
|
|
||||||
("America/Argentina/Tucuman", "America/Argentina/Tucuman"),
|
|
||||||
("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"),
|
|
||||||
("America/Aruba", "America/Aruba"),
|
|
||||||
("America/Asuncion", "America/Asuncion"),
|
|
||||||
("America/Atikokan", "America/Atikokan"),
|
|
||||||
("America/Atka", "America/Atka"),
|
|
||||||
("America/Bahia", "America/Bahia"),
|
|
||||||
("America/Bahia_Banderas", "America/Bahia_Banderas"),
|
|
||||||
("America/Barbados", "America/Barbados"),
|
|
||||||
("America/Belem", "America/Belem"),
|
|
||||||
("America/Belize", "America/Belize"),
|
|
||||||
("America/Blanc-Sablon", "America/Blanc-Sablon"),
|
|
||||||
("America/Boa_Vista", "America/Boa_Vista"),
|
|
||||||
("America/Bogota", "America/Bogota"),
|
|
||||||
("America/Boise", "America/Boise"),
|
|
||||||
("America/Buenos_Aires", "America/Buenos_Aires"),
|
|
||||||
("America/Cambridge_Bay", "America/Cambridge_Bay"),
|
|
||||||
("America/Campo_Grande", "America/Campo_Grande"),
|
|
||||||
("America/Cancun", "America/Cancun"),
|
|
||||||
("America/Caracas", "America/Caracas"),
|
|
||||||
("America/Catamarca", "America/Catamarca"),
|
|
||||||
("America/Cayenne", "America/Cayenne"),
|
|
||||||
("America/Cayman", "America/Cayman"),
|
|
||||||
("America/Chicago", "America/Chicago"),
|
|
||||||
("America/Chihuahua", "America/Chihuahua"),
|
|
||||||
("America/Ciudad_Juarez", "America/Ciudad_Juarez"),
|
|
||||||
("America/Coral_Harbour", "America/Coral_Harbour"),
|
|
||||||
("America/Cordoba", "America/Cordoba"),
|
|
||||||
("America/Costa_Rica", "America/Costa_Rica"),
|
|
||||||
("America/Creston", "America/Creston"),
|
|
||||||
("America/Cuiaba", "America/Cuiaba"),
|
|
||||||
("America/Curacao", "America/Curacao"),
|
|
||||||
("America/Danmarkshavn", "America/Danmarkshavn"),
|
|
||||||
("America/Dawson", "America/Dawson"),
|
|
||||||
("America/Dawson_Creek", "America/Dawson_Creek"),
|
|
||||||
("America/Denver", "America/Denver"),
|
|
||||||
("America/Detroit", "America/Detroit"),
|
|
||||||
("America/Dominica", "America/Dominica"),
|
|
||||||
("America/Edmonton", "America/Edmonton"),
|
|
||||||
("America/Eirunepe", "America/Eirunepe"),
|
|
||||||
("America/El_Salvador", "America/El_Salvador"),
|
|
||||||
("America/Ensenada", "America/Ensenada"),
|
|
||||||
("America/Fort_Nelson", "America/Fort_Nelson"),
|
|
||||||
("America/Fort_Wayne", "America/Fort_Wayne"),
|
|
||||||
("America/Fortaleza", "America/Fortaleza"),
|
|
||||||
("America/Glace_Bay", "America/Glace_Bay"),
|
|
||||||
("America/Godthab", "America/Godthab"),
|
|
||||||
("America/Goose_Bay", "America/Goose_Bay"),
|
|
||||||
("America/Grand_Turk", "America/Grand_Turk"),
|
|
||||||
("America/Grenada", "America/Grenada"),
|
|
||||||
("America/Guadeloupe", "America/Guadeloupe"),
|
|
||||||
("America/Guatemala", "America/Guatemala"),
|
|
||||||
("America/Guayaquil", "America/Guayaquil"),
|
|
||||||
("America/Guyana", "America/Guyana"),
|
|
||||||
("America/Halifax", "America/Halifax"),
|
|
||||||
("America/Havana", "America/Havana"),
|
|
||||||
("America/Hermosillo", "America/Hermosillo"),
|
|
||||||
("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"),
|
|
||||||
("America/Indiana/Knox", "America/Indiana/Knox"),
|
|
||||||
("America/Indiana/Marengo", "America/Indiana/Marengo"),
|
|
||||||
("America/Indiana/Petersburg", "America/Indiana/Petersburg"),
|
|
||||||
("America/Indiana/Tell_City", "America/Indiana/Tell_City"),
|
|
||||||
("America/Indiana/Vevay", "America/Indiana/Vevay"),
|
|
||||||
("America/Indiana/Vincennes", "America/Indiana/Vincennes"),
|
|
||||||
("America/Indiana/Winamac", "America/Indiana/Winamac"),
|
|
||||||
("America/Indianapolis", "America/Indianapolis"),
|
|
||||||
("America/Inuvik", "America/Inuvik"),
|
|
||||||
("America/Iqaluit", "America/Iqaluit"),
|
|
||||||
("America/Jamaica", "America/Jamaica"),
|
|
||||||
("America/Jujuy", "America/Jujuy"),
|
|
||||||
("America/Juneau", "America/Juneau"),
|
|
||||||
("America/Kentucky/Louisville", "America/Kentucky/Louisville"),
|
|
||||||
("America/Kentucky/Monticello", "America/Kentucky/Monticello"),
|
|
||||||
("America/Knox_IN", "America/Knox_IN"),
|
|
||||||
("America/Kralendijk", "America/Kralendijk"),
|
|
||||||
("America/La_Paz", "America/La_Paz"),
|
|
||||||
("America/Lima", "America/Lima"),
|
|
||||||
("America/Los_Angeles", "America/Los_Angeles"),
|
|
||||||
("America/Louisville", "America/Louisville"),
|
|
||||||
("America/Lower_Princes", "America/Lower_Princes"),
|
|
||||||
("America/Maceio", "America/Maceio"),
|
|
||||||
("America/Managua", "America/Managua"),
|
|
||||||
("America/Manaus", "America/Manaus"),
|
|
||||||
("America/Marigot", "America/Marigot"),
|
|
||||||
("America/Martinique", "America/Martinique"),
|
|
||||||
("America/Matamoros", "America/Matamoros"),
|
|
||||||
("America/Mazatlan", "America/Mazatlan"),
|
|
||||||
("America/Mendoza", "America/Mendoza"),
|
|
||||||
("America/Menominee", "America/Menominee"),
|
|
||||||
("America/Merida", "America/Merida"),
|
|
||||||
("America/Metlakatla", "America/Metlakatla"),
|
|
||||||
("America/Mexico_City", "America/Mexico_City"),
|
|
||||||
("America/Miquelon", "America/Miquelon"),
|
|
||||||
("America/Moncton", "America/Moncton"),
|
|
||||||
("America/Monterrey", "America/Monterrey"),
|
|
||||||
("America/Montevideo", "America/Montevideo"),
|
|
||||||
("America/Montreal", "America/Montreal"),
|
|
||||||
("America/Montserrat", "America/Montserrat"),
|
|
||||||
("America/Nassau", "America/Nassau"),
|
|
||||||
("America/New_York", "America/New_York"),
|
|
||||||
("America/Nipigon", "America/Nipigon"),
|
|
||||||
("America/Nome", "America/Nome"),
|
|
||||||
("America/Noronha", "America/Noronha"),
|
|
||||||
("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"),
|
|
||||||
("America/North_Dakota/Center", "America/North_Dakota/Center"),
|
|
||||||
(
|
|
||||||
"America/North_Dakota/New_Salem",
|
|
||||||
"America/North_Dakota/New_Salem",
|
|
||||||
),
|
|
||||||
("America/Nuuk", "America/Nuuk"),
|
|
||||||
("America/Ojinaga", "America/Ojinaga"),
|
|
||||||
("America/Panama", "America/Panama"),
|
|
||||||
("America/Pangnirtung", "America/Pangnirtung"),
|
|
||||||
("America/Paramaribo", "America/Paramaribo"),
|
|
||||||
("America/Phoenix", "America/Phoenix"),
|
|
||||||
("America/Port-au-Prince", "America/Port-au-Prince"),
|
|
||||||
("America/Port_of_Spain", "America/Port_of_Spain"),
|
|
||||||
("America/Porto_Acre", "America/Porto_Acre"),
|
|
||||||
("America/Porto_Velho", "America/Porto_Velho"),
|
|
||||||
("America/Puerto_Rico", "America/Puerto_Rico"),
|
|
||||||
("America/Punta_Arenas", "America/Punta_Arenas"),
|
|
||||||
("America/Rainy_River", "America/Rainy_River"),
|
|
||||||
("America/Rankin_Inlet", "America/Rankin_Inlet"),
|
|
||||||
("America/Recife", "America/Recife"),
|
|
||||||
("America/Regina", "America/Regina"),
|
|
||||||
("America/Resolute", "America/Resolute"),
|
|
||||||
("America/Rio_Branco", "America/Rio_Branco"),
|
|
||||||
("America/Rosario", "America/Rosario"),
|
|
||||||
("America/Santa_Isabel", "America/Santa_Isabel"),
|
|
||||||
("America/Santarem", "America/Santarem"),
|
|
||||||
("America/Santiago", "America/Santiago"),
|
|
||||||
("America/Santo_Domingo", "America/Santo_Domingo"),
|
|
||||||
("America/Sao_Paulo", "America/Sao_Paulo"),
|
|
||||||
("America/Scoresbysund", "America/Scoresbysund"),
|
|
||||||
("America/Shiprock", "America/Shiprock"),
|
|
||||||
("America/Sitka", "America/Sitka"),
|
|
||||||
("America/St_Barthelemy", "America/St_Barthelemy"),
|
|
||||||
("America/St_Johns", "America/St_Johns"),
|
|
||||||
("America/St_Kitts", "America/St_Kitts"),
|
|
||||||
("America/St_Lucia", "America/St_Lucia"),
|
|
||||||
("America/St_Thomas", "America/St_Thomas"),
|
|
||||||
("America/St_Vincent", "America/St_Vincent"),
|
|
||||||
("America/Swift_Current", "America/Swift_Current"),
|
|
||||||
("America/Tegucigalpa", "America/Tegucigalpa"),
|
|
||||||
("America/Thule", "America/Thule"),
|
|
||||||
("America/Thunder_Bay", "America/Thunder_Bay"),
|
|
||||||
("America/Tijuana", "America/Tijuana"),
|
|
||||||
("America/Toronto", "America/Toronto"),
|
|
||||||
("America/Tortola", "America/Tortola"),
|
|
||||||
("America/Vancouver", "America/Vancouver"),
|
|
||||||
("America/Virgin", "America/Virgin"),
|
|
||||||
("America/Whitehorse", "America/Whitehorse"),
|
|
||||||
("America/Winnipeg", "America/Winnipeg"),
|
|
||||||
("America/Yakutat", "America/Yakutat"),
|
|
||||||
("America/Yellowknife", "America/Yellowknife"),
|
|
||||||
("Antarctica/Casey", "Antarctica/Casey"),
|
|
||||||
("Antarctica/Davis", "Antarctica/Davis"),
|
|
||||||
("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"),
|
|
||||||
("Antarctica/Macquarie", "Antarctica/Macquarie"),
|
|
||||||
("Antarctica/Mawson", "Antarctica/Mawson"),
|
|
||||||
("Antarctica/McMurdo", "Antarctica/McMurdo"),
|
|
||||||
("Antarctica/Palmer", "Antarctica/Palmer"),
|
|
||||||
("Antarctica/Rothera", "Antarctica/Rothera"),
|
|
||||||
("Antarctica/South_Pole", "Antarctica/South_Pole"),
|
|
||||||
("Antarctica/Syowa", "Antarctica/Syowa"),
|
|
||||||
("Antarctica/Troll", "Antarctica/Troll"),
|
|
||||||
("Antarctica/Vostok", "Antarctica/Vostok"),
|
|
||||||
("Arctic/Longyearbyen", "Arctic/Longyearbyen"),
|
|
||||||
("Asia/Aden", "Asia/Aden"),
|
|
||||||
("Asia/Almaty", "Asia/Almaty"),
|
|
||||||
("Asia/Amman", "Asia/Amman"),
|
|
||||||
("Asia/Anadyr", "Asia/Anadyr"),
|
|
||||||
("Asia/Aqtau", "Asia/Aqtau"),
|
|
||||||
("Asia/Aqtobe", "Asia/Aqtobe"),
|
|
||||||
("Asia/Ashgabat", "Asia/Ashgabat"),
|
|
||||||
("Asia/Ashkhabad", "Asia/Ashkhabad"),
|
|
||||||
("Asia/Atyrau", "Asia/Atyrau"),
|
|
||||||
("Asia/Baghdad", "Asia/Baghdad"),
|
|
||||||
("Asia/Bahrain", "Asia/Bahrain"),
|
|
||||||
("Asia/Baku", "Asia/Baku"),
|
|
||||||
("Asia/Bangkok", "Asia/Bangkok"),
|
|
||||||
("Asia/Barnaul", "Asia/Barnaul"),
|
|
||||||
("Asia/Beirut", "Asia/Beirut"),
|
|
||||||
("Asia/Bishkek", "Asia/Bishkek"),
|
|
||||||
("Asia/Brunei", "Asia/Brunei"),
|
|
||||||
("Asia/Calcutta", "Asia/Calcutta"),
|
|
||||||
("Asia/Chita", "Asia/Chita"),
|
|
||||||
("Asia/Choibalsan", "Asia/Choibalsan"),
|
|
||||||
("Asia/Chongqing", "Asia/Chongqing"),
|
|
||||||
("Asia/Chungking", "Asia/Chungking"),
|
|
||||||
("Asia/Colombo", "Asia/Colombo"),
|
|
||||||
("Asia/Dacca", "Asia/Dacca"),
|
|
||||||
("Asia/Damascus", "Asia/Damascus"),
|
|
||||||
("Asia/Dhaka", "Asia/Dhaka"),
|
|
||||||
("Asia/Dili", "Asia/Dili"),
|
|
||||||
("Asia/Dubai", "Asia/Dubai"),
|
|
||||||
("Asia/Dushanbe", "Asia/Dushanbe"),
|
|
||||||
("Asia/Famagusta", "Asia/Famagusta"),
|
|
||||||
("Asia/Gaza", "Asia/Gaza"),
|
|
||||||
("Asia/Harbin", "Asia/Harbin"),
|
|
||||||
("Asia/Hebron", "Asia/Hebron"),
|
|
||||||
("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"),
|
|
||||||
("Asia/Hong_Kong", "Asia/Hong_Kong"),
|
|
||||||
("Asia/Hovd", "Asia/Hovd"),
|
|
||||||
("Asia/Irkutsk", "Asia/Irkutsk"),
|
|
||||||
("Asia/Istanbul", "Asia/Istanbul"),
|
|
||||||
("Asia/Jakarta", "Asia/Jakarta"),
|
|
||||||
("Asia/Jayapura", "Asia/Jayapura"),
|
|
||||||
("Asia/Jerusalem", "Asia/Jerusalem"),
|
|
||||||
("Asia/Kabul", "Asia/Kabul"),
|
|
||||||
("Asia/Kamchatka", "Asia/Kamchatka"),
|
|
||||||
("Asia/Karachi", "Asia/Karachi"),
|
|
||||||
("Asia/Kashgar", "Asia/Kashgar"),
|
|
||||||
("Asia/Kathmandu", "Asia/Kathmandu"),
|
|
||||||
("Asia/Katmandu", "Asia/Katmandu"),
|
|
||||||
("Asia/Khandyga", "Asia/Khandyga"),
|
|
||||||
("Asia/Kolkata", "Asia/Kolkata"),
|
|
||||||
("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"),
|
|
||||||
("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"),
|
|
||||||
("Asia/Kuching", "Asia/Kuching"),
|
|
||||||
("Asia/Kuwait", "Asia/Kuwait"),
|
|
||||||
("Asia/Macao", "Asia/Macao"),
|
|
||||||
("Asia/Macau", "Asia/Macau"),
|
|
||||||
("Asia/Magadan", "Asia/Magadan"),
|
|
||||||
("Asia/Makassar", "Asia/Makassar"),
|
|
||||||
("Asia/Manila", "Asia/Manila"),
|
|
||||||
("Asia/Muscat", "Asia/Muscat"),
|
|
||||||
("Asia/Nicosia", "Asia/Nicosia"),
|
|
||||||
("Asia/Novokuznetsk", "Asia/Novokuznetsk"),
|
|
||||||
("Asia/Novosibirsk", "Asia/Novosibirsk"),
|
|
||||||
("Asia/Omsk", "Asia/Omsk"),
|
|
||||||
("Asia/Oral", "Asia/Oral"),
|
|
||||||
("Asia/Phnom_Penh", "Asia/Phnom_Penh"),
|
|
||||||
("Asia/Pontianak", "Asia/Pontianak"),
|
|
||||||
("Asia/Pyongyang", "Asia/Pyongyang"),
|
|
||||||
("Asia/Qatar", "Asia/Qatar"),
|
|
||||||
("Asia/Qostanay", "Asia/Qostanay"),
|
|
||||||
("Asia/Qyzylorda", "Asia/Qyzylorda"),
|
|
||||||
("Asia/Rangoon", "Asia/Rangoon"),
|
|
||||||
("Asia/Riyadh", "Asia/Riyadh"),
|
|
||||||
("Asia/Saigon", "Asia/Saigon"),
|
|
||||||
("Asia/Sakhalin", "Asia/Sakhalin"),
|
|
||||||
("Asia/Samarkand", "Asia/Samarkand"),
|
|
||||||
("Asia/Seoul", "Asia/Seoul"),
|
|
||||||
("Asia/Shanghai", "Asia/Shanghai"),
|
|
||||||
("Asia/Singapore", "Asia/Singapore"),
|
|
||||||
("Asia/Srednekolymsk", "Asia/Srednekolymsk"),
|
|
||||||
("Asia/Taipei", "Asia/Taipei"),
|
|
||||||
("Asia/Tashkent", "Asia/Tashkent"),
|
|
||||||
("Asia/Tbilisi", "Asia/Tbilisi"),
|
|
||||||
("Asia/Tehran", "Asia/Tehran"),
|
|
||||||
("Asia/Tel_Aviv", "Asia/Tel_Aviv"),
|
|
||||||
("Asia/Thimbu", "Asia/Thimbu"),
|
|
||||||
("Asia/Thimphu", "Asia/Thimphu"),
|
|
||||||
("Asia/Tokyo", "Asia/Tokyo"),
|
|
||||||
("Asia/Tomsk", "Asia/Tomsk"),
|
|
||||||
("Asia/Ujung_Pandang", "Asia/Ujung_Pandang"),
|
|
||||||
("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"),
|
|
||||||
("Asia/Ulan_Bator", "Asia/Ulan_Bator"),
|
|
||||||
("Asia/Urumqi", "Asia/Urumqi"),
|
|
||||||
("Asia/Ust-Nera", "Asia/Ust-Nera"),
|
|
||||||
("Asia/Vientiane", "Asia/Vientiane"),
|
|
||||||
("Asia/Vladivostok", "Asia/Vladivostok"),
|
|
||||||
("Asia/Yakutsk", "Asia/Yakutsk"),
|
|
||||||
("Asia/Yangon", "Asia/Yangon"),
|
|
||||||
("Asia/Yekaterinburg", "Asia/Yekaterinburg"),
|
|
||||||
("Asia/Yerevan", "Asia/Yerevan"),
|
|
||||||
("Atlantic/Azores", "Atlantic/Azores"),
|
|
||||||
("Atlantic/Bermuda", "Atlantic/Bermuda"),
|
|
||||||
("Atlantic/Canary", "Atlantic/Canary"),
|
|
||||||
("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"),
|
|
||||||
("Atlantic/Faeroe", "Atlantic/Faeroe"),
|
|
||||||
("Atlantic/Faroe", "Atlantic/Faroe"),
|
|
||||||
("Atlantic/Jan_Mayen", "Atlantic/Jan_Mayen"),
|
|
||||||
("Atlantic/Madeira", "Atlantic/Madeira"),
|
|
||||||
("Atlantic/Reykjavik", "Atlantic/Reykjavik"),
|
|
||||||
("Atlantic/South_Georgia", "Atlantic/South_Georgia"),
|
|
||||||
("Atlantic/St_Helena", "Atlantic/St_Helena"),
|
|
||||||
("Atlantic/Stanley", "Atlantic/Stanley"),
|
|
||||||
("Australia/ACT", "Australia/ACT"),
|
|
||||||
("Australia/Adelaide", "Australia/Adelaide"),
|
|
||||||
("Australia/Brisbane", "Australia/Brisbane"),
|
|
||||||
("Australia/Broken_Hill", "Australia/Broken_Hill"),
|
|
||||||
("Australia/Canberra", "Australia/Canberra"),
|
|
||||||
("Australia/Currie", "Australia/Currie"),
|
|
||||||
("Australia/Darwin", "Australia/Darwin"),
|
|
||||||
("Australia/Eucla", "Australia/Eucla"),
|
|
||||||
("Australia/Hobart", "Australia/Hobart"),
|
|
||||||
("Australia/LHI", "Australia/LHI"),
|
|
||||||
("Australia/Lindeman", "Australia/Lindeman"),
|
|
||||||
("Australia/Lord_Howe", "Australia/Lord_Howe"),
|
|
||||||
("Australia/Melbourne", "Australia/Melbourne"),
|
|
||||||
("Australia/NSW", "Australia/NSW"),
|
|
||||||
("Australia/North", "Australia/North"),
|
|
||||||
("Australia/Perth", "Australia/Perth"),
|
|
||||||
("Australia/Queensland", "Australia/Queensland"),
|
|
||||||
("Australia/South", "Australia/South"),
|
|
||||||
("Australia/Sydney", "Australia/Sydney"),
|
|
||||||
("Australia/Tasmania", "Australia/Tasmania"),
|
|
||||||
("Australia/Victoria", "Australia/Victoria"),
|
|
||||||
("Australia/West", "Australia/West"),
|
|
||||||
("Australia/Yancowinna", "Australia/Yancowinna"),
|
|
||||||
("Brazil/Acre", "Brazil/Acre"),
|
|
||||||
("Brazil/DeNoronha", "Brazil/DeNoronha"),
|
|
||||||
("Brazil/East", "Brazil/East"),
|
|
||||||
("Brazil/West", "Brazil/West"),
|
|
||||||
("CET", "CET"),
|
|
||||||
("CST6CDT", "CST6CDT"),
|
|
||||||
("Canada/Atlantic", "Canada/Atlantic"),
|
|
||||||
("Canada/Central", "Canada/Central"),
|
|
||||||
("Canada/Eastern", "Canada/Eastern"),
|
|
||||||
("Canada/Mountain", "Canada/Mountain"),
|
|
||||||
("Canada/Newfoundland", "Canada/Newfoundland"),
|
|
||||||
("Canada/Pacific", "Canada/Pacific"),
|
|
||||||
("Canada/Saskatchewan", "Canada/Saskatchewan"),
|
|
||||||
("Canada/Yukon", "Canada/Yukon"),
|
|
||||||
("Chile/Continental", "Chile/Continental"),
|
|
||||||
("Chile/EasterIsland", "Chile/EasterIsland"),
|
|
||||||
("Cuba", "Cuba"),
|
|
||||||
("EET", "EET"),
|
|
||||||
("EST", "EST"),
|
|
||||||
("EST5EDT", "EST5EDT"),
|
|
||||||
("Egypt", "Egypt"),
|
|
||||||
("Eire", "Eire"),
|
|
||||||
("Etc/GMT", "Etc/GMT"),
|
|
||||||
("Etc/GMT+0", "Etc/GMT+0"),
|
|
||||||
("Etc/GMT+1", "Etc/GMT+1"),
|
|
||||||
("Etc/GMT+10", "Etc/GMT+10"),
|
|
||||||
("Etc/GMT+11", "Etc/GMT+11"),
|
|
||||||
("Etc/GMT+12", "Etc/GMT+12"),
|
|
||||||
("Etc/GMT+2", "Etc/GMT+2"),
|
|
||||||
("Etc/GMT+3", "Etc/GMT+3"),
|
|
||||||
("Etc/GMT+4", "Etc/GMT+4"),
|
|
||||||
("Etc/GMT+5", "Etc/GMT+5"),
|
|
||||||
("Etc/GMT+6", "Etc/GMT+6"),
|
|
||||||
("Etc/GMT+7", "Etc/GMT+7"),
|
|
||||||
("Etc/GMT+8", "Etc/GMT+8"),
|
|
||||||
("Etc/GMT+9", "Etc/GMT+9"),
|
|
||||||
("Etc/GMT-0", "Etc/GMT-0"),
|
|
||||||
("Etc/GMT-1", "Etc/GMT-1"),
|
|
||||||
("Etc/GMT-10", "Etc/GMT-10"),
|
|
||||||
("Etc/GMT-11", "Etc/GMT-11"),
|
|
||||||
("Etc/GMT-12", "Etc/GMT-12"),
|
|
||||||
("Etc/GMT-13", "Etc/GMT-13"),
|
|
||||||
("Etc/GMT-14", "Etc/GMT-14"),
|
|
||||||
("Etc/GMT-2", "Etc/GMT-2"),
|
|
||||||
("Etc/GMT-3", "Etc/GMT-3"),
|
|
||||||
("Etc/GMT-4", "Etc/GMT-4"),
|
|
||||||
("Etc/GMT-5", "Etc/GMT-5"),
|
|
||||||
("Etc/GMT-6", "Etc/GMT-6"),
|
|
||||||
("Etc/GMT-7", "Etc/GMT-7"),
|
|
||||||
("Etc/GMT-8", "Etc/GMT-8"),
|
|
||||||
("Etc/GMT-9", "Etc/GMT-9"),
|
|
||||||
("Etc/GMT0", "Etc/GMT0"),
|
|
||||||
("Etc/Greenwich", "Etc/Greenwich"),
|
|
||||||
("Etc/UCT", "Etc/UCT"),
|
|
||||||
("Etc/UTC", "Etc/UTC"),
|
|
||||||
("Etc/Universal", "Etc/Universal"),
|
|
||||||
("Etc/Zulu", "Etc/Zulu"),
|
|
||||||
("Europe/Amsterdam", "Europe/Amsterdam"),
|
|
||||||
("Europe/Andorra", "Europe/Andorra"),
|
|
||||||
("Europe/Astrakhan", "Europe/Astrakhan"),
|
|
||||||
("Europe/Athens", "Europe/Athens"),
|
|
||||||
("Europe/Belfast", "Europe/Belfast"),
|
|
||||||
("Europe/Belgrade", "Europe/Belgrade"),
|
|
||||||
("Europe/Berlin", "Europe/Berlin"),
|
|
||||||
("Europe/Bratislava", "Europe/Bratislava"),
|
|
||||||
("Europe/Brussels", "Europe/Brussels"),
|
|
||||||
("Europe/Bucharest", "Europe/Bucharest"),
|
|
||||||
("Europe/Budapest", "Europe/Budapest"),
|
|
||||||
("Europe/Busingen", "Europe/Busingen"),
|
|
||||||
("Europe/Chisinau", "Europe/Chisinau"),
|
|
||||||
("Europe/Copenhagen", "Europe/Copenhagen"),
|
|
||||||
("Europe/Dublin", "Europe/Dublin"),
|
|
||||||
("Europe/Gibraltar", "Europe/Gibraltar"),
|
|
||||||
("Europe/Guernsey", "Europe/Guernsey"),
|
|
||||||
("Europe/Helsinki", "Europe/Helsinki"),
|
|
||||||
("Europe/Isle_of_Man", "Europe/Isle_of_Man"),
|
|
||||||
("Europe/Istanbul", "Europe/Istanbul"),
|
|
||||||
("Europe/Jersey", "Europe/Jersey"),
|
|
||||||
("Europe/Kaliningrad", "Europe/Kaliningrad"),
|
|
||||||
("Europe/Kiev", "Europe/Kiev"),
|
|
||||||
("Europe/Kirov", "Europe/Kirov"),
|
|
||||||
("Europe/Kyiv", "Europe/Kyiv"),
|
|
||||||
("Europe/Lisbon", "Europe/Lisbon"),
|
|
||||||
("Europe/Ljubljana", "Europe/Ljubljana"),
|
|
||||||
("Europe/London", "Europe/London"),
|
|
||||||
("Europe/Luxembourg", "Europe/Luxembourg"),
|
|
||||||
("Europe/Madrid", "Europe/Madrid"),
|
|
||||||
("Europe/Malta", "Europe/Malta"),
|
|
||||||
("Europe/Mariehamn", "Europe/Mariehamn"),
|
|
||||||
("Europe/Minsk", "Europe/Minsk"),
|
|
||||||
("Europe/Monaco", "Europe/Monaco"),
|
|
||||||
("Europe/Moscow", "Europe/Moscow"),
|
|
||||||
("Europe/Nicosia", "Europe/Nicosia"),
|
|
||||||
("Europe/Oslo", "Europe/Oslo"),
|
|
||||||
("Europe/Paris", "Europe/Paris"),
|
|
||||||
("Europe/Podgorica", "Europe/Podgorica"),
|
|
||||||
("Europe/Prague", "Europe/Prague"),
|
|
||||||
("Europe/Riga", "Europe/Riga"),
|
|
||||||
("Europe/Rome", "Europe/Rome"),
|
|
||||||
("Europe/Samara", "Europe/Samara"),
|
|
||||||
("Europe/San_Marino", "Europe/San_Marino"),
|
|
||||||
("Europe/Sarajevo", "Europe/Sarajevo"),
|
|
||||||
("Europe/Saratov", "Europe/Saratov"),
|
|
||||||
("Europe/Simferopol", "Europe/Simferopol"),
|
|
||||||
("Europe/Skopje", "Europe/Skopje"),
|
|
||||||
("Europe/Sofia", "Europe/Sofia"),
|
|
||||||
("Europe/Stockholm", "Europe/Stockholm"),
|
|
||||||
("Europe/Tallinn", "Europe/Tallinn"),
|
|
||||||
("Europe/Tirane", "Europe/Tirane"),
|
|
||||||
("Europe/Tiraspol", "Europe/Tiraspol"),
|
|
||||||
("Europe/Ulyanovsk", "Europe/Ulyanovsk"),
|
|
||||||
("Europe/Uzhgorod", "Europe/Uzhgorod"),
|
|
||||||
("Europe/Vaduz", "Europe/Vaduz"),
|
|
||||||
("Europe/Vatican", "Europe/Vatican"),
|
|
||||||
("Europe/Vienna", "Europe/Vienna"),
|
|
||||||
("Europe/Vilnius", "Europe/Vilnius"),
|
|
||||||
("Europe/Volgograd", "Europe/Volgograd"),
|
|
||||||
("Europe/Warsaw", "Europe/Warsaw"),
|
|
||||||
("Europe/Zagreb", "Europe/Zagreb"),
|
|
||||||
("Europe/Zaporozhye", "Europe/Zaporozhye"),
|
|
||||||
("Europe/Zurich", "Europe/Zurich"),
|
|
||||||
("Factory", "Factory"),
|
|
||||||
("GB", "GB"),
|
|
||||||
("GB-Eire", "GB-Eire"),
|
|
||||||
("GMT", "GMT"),
|
|
||||||
("GMT+0", "GMT+0"),
|
|
||||||
("GMT-0", "GMT-0"),
|
|
||||||
("GMT0", "GMT0"),
|
|
||||||
("Greenwich", "Greenwich"),
|
|
||||||
("HST", "HST"),
|
|
||||||
("Hongkong", "Hongkong"),
|
|
||||||
("Iceland", "Iceland"),
|
|
||||||
("Indian/Antananarivo", "Indian/Antananarivo"),
|
|
||||||
("Indian/Chagos", "Indian/Chagos"),
|
|
||||||
("Indian/Christmas", "Indian/Christmas"),
|
|
||||||
("Indian/Cocos", "Indian/Cocos"),
|
|
||||||
("Indian/Comoro", "Indian/Comoro"),
|
|
||||||
("Indian/Kerguelen", "Indian/Kerguelen"),
|
|
||||||
("Indian/Mahe", "Indian/Mahe"),
|
|
||||||
("Indian/Maldives", "Indian/Maldives"),
|
|
||||||
("Indian/Mauritius", "Indian/Mauritius"),
|
|
||||||
("Indian/Mayotte", "Indian/Mayotte"),
|
|
||||||
("Indian/Reunion", "Indian/Reunion"),
|
|
||||||
("Iran", "Iran"),
|
|
||||||
("Israel", "Israel"),
|
|
||||||
("Jamaica", "Jamaica"),
|
|
||||||
("Japan", "Japan"),
|
|
||||||
("Kwajalein", "Kwajalein"),
|
|
||||||
("Libya", "Libya"),
|
|
||||||
("MET", "MET"),
|
|
||||||
("MST", "MST"),
|
|
||||||
("MST7MDT", "MST7MDT"),
|
|
||||||
("Mexico/BajaNorte", "Mexico/BajaNorte"),
|
|
||||||
("Mexico/BajaSur", "Mexico/BajaSur"),
|
|
||||||
("Mexico/General", "Mexico/General"),
|
|
||||||
("NZ", "NZ"),
|
|
||||||
("NZ-CHAT", "NZ-CHAT"),
|
|
||||||
("Navajo", "Navajo"),
|
|
||||||
("PRC", "PRC"),
|
|
||||||
("PST8PDT", "PST8PDT"),
|
|
||||||
("Pacific/Apia", "Pacific/Apia"),
|
|
||||||
("Pacific/Auckland", "Pacific/Auckland"),
|
|
||||||
("Pacific/Bougainville", "Pacific/Bougainville"),
|
|
||||||
("Pacific/Chatham", "Pacific/Chatham"),
|
|
||||||
("Pacific/Chuuk", "Pacific/Chuuk"),
|
|
||||||
("Pacific/Easter", "Pacific/Easter"),
|
|
||||||
("Pacific/Efate", "Pacific/Efate"),
|
|
||||||
("Pacific/Enderbury", "Pacific/Enderbury"),
|
|
||||||
("Pacific/Fakaofo", "Pacific/Fakaofo"),
|
|
||||||
("Pacific/Fiji", "Pacific/Fiji"),
|
|
||||||
("Pacific/Funafuti", "Pacific/Funafuti"),
|
|
||||||
("Pacific/Galapagos", "Pacific/Galapagos"),
|
|
||||||
("Pacific/Gambier", "Pacific/Gambier"),
|
|
||||||
("Pacific/Guadalcanal", "Pacific/Guadalcanal"),
|
|
||||||
("Pacific/Guam", "Pacific/Guam"),
|
|
||||||
("Pacific/Honolulu", "Pacific/Honolulu"),
|
|
||||||
("Pacific/Johnston", "Pacific/Johnston"),
|
|
||||||
("Pacific/Kanton", "Pacific/Kanton"),
|
|
||||||
("Pacific/Kiritimati", "Pacific/Kiritimati"),
|
|
||||||
("Pacific/Kosrae", "Pacific/Kosrae"),
|
|
||||||
("Pacific/Kwajalein", "Pacific/Kwajalein"),
|
|
||||||
("Pacific/Majuro", "Pacific/Majuro"),
|
|
||||||
("Pacific/Marquesas", "Pacific/Marquesas"),
|
|
||||||
("Pacific/Midway", "Pacific/Midway"),
|
|
||||||
("Pacific/Nauru", "Pacific/Nauru"),
|
|
||||||
("Pacific/Niue", "Pacific/Niue"),
|
|
||||||
("Pacific/Norfolk", "Pacific/Norfolk"),
|
|
||||||
("Pacific/Noumea", "Pacific/Noumea"),
|
|
||||||
("Pacific/Pago_Pago", "Pacific/Pago_Pago"),
|
|
||||||
("Pacific/Palau", "Pacific/Palau"),
|
|
||||||
("Pacific/Pitcairn", "Pacific/Pitcairn"),
|
|
||||||
("Pacific/Pohnpei", "Pacific/Pohnpei"),
|
|
||||||
("Pacific/Ponape", "Pacific/Ponape"),
|
|
||||||
("Pacific/Port_Moresby", "Pacific/Port_Moresby"),
|
|
||||||
("Pacific/Rarotonga", "Pacific/Rarotonga"),
|
|
||||||
("Pacific/Saipan", "Pacific/Saipan"),
|
|
||||||
("Pacific/Samoa", "Pacific/Samoa"),
|
|
||||||
("Pacific/Tahiti", "Pacific/Tahiti"),
|
|
||||||
("Pacific/Tarawa", "Pacific/Tarawa"),
|
|
||||||
("Pacific/Tongatapu", "Pacific/Tongatapu"),
|
|
||||||
("Pacific/Truk", "Pacific/Truk"),
|
|
||||||
("Pacific/Wake", "Pacific/Wake"),
|
|
||||||
("Pacific/Wallis", "Pacific/Wallis"),
|
|
||||||
("Pacific/Yap", "Pacific/Yap"),
|
|
||||||
("Poland", "Poland"),
|
|
||||||
("Portugal", "Portugal"),
|
|
||||||
("ROC", "ROC"),
|
|
||||||
("ROK", "ROK"),
|
|
||||||
("Singapore", "Singapore"),
|
|
||||||
("Turkey", "Turkey"),
|
|
||||||
("UCT", "UCT"),
|
|
||||||
("US/Alaska", "US/Alaska"),
|
|
||||||
("US/Aleutian", "US/Aleutian"),
|
|
||||||
("US/Arizona", "US/Arizona"),
|
|
||||||
("US/Central", "US/Central"),
|
|
||||||
("US/East-Indiana", "US/East-Indiana"),
|
|
||||||
("US/Eastern", "US/Eastern"),
|
|
||||||
("US/Hawaii", "US/Hawaii"),
|
|
||||||
("US/Indiana-Starke", "US/Indiana-Starke"),
|
|
||||||
("US/Michigan", "US/Michigan"),
|
|
||||||
("US/Mountain", "US/Mountain"),
|
|
||||||
("US/Pacific", "US/Pacific"),
|
|
||||||
("US/Samoa", "US/Samoa"),
|
|
||||||
("UTC", "UTC"),
|
|
||||||
("Universal", "Universal"),
|
|
||||||
("W-SU", "W-SU"),
|
|
||||||
("WET", "WET"),
|
|
||||||
("Zulu", "Zulu"),
|
|
||||||
("localtime", "localtime"),
|
|
||||||
],
|
|
||||||
max_length=255,
|
|
||||||
null=True,
|
|
||||||
),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,18 +0,0 @@
# Generated by Django 4.2.10 on 2024-02-19 05:57

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("agents", "0058_alter_agent_time_zone"),
    ]

    operations = [
        migrations.AlterField(
            model_name="agenthistory",
            name="id",
            field=models.BigAutoField(primary_key=True, serialize=False),
        ),
    ]
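The removed migration above only switches the AgentHistory primary key to a 64-bit auto field. A migration like this is usually emitted after the default auto field is changed project- or app-wide; whether this project does that in settings or in an AppConfig is an assumption, but either of the standard forms below would produce the same AlterField.

# Hedged sketch of the conventional trigger for such a migration.

# settings.py
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"

# or per app, e.g. agents/apps.py
from django.apps import AppConfig


class AgentsConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "agents"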
@@ -1,36 +0,0 @@
# Generated by Django 4.2.16 on 2024-10-05 20:39

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ("core", "0047_alter_coresettings_notify_on_warning_alerts"),
        ("agents", "0059_alter_agenthistory_id"),
    ]

    operations = [
        migrations.AddField(
            model_name="agenthistory",
            name="collector_all_output",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="agenthistory",
            name="custom_field",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="history",
                to="core.customfield",
            ),
        ),
        migrations.AddField(
            model_name="agenthistory",
            name="save_to_agent_note",
            field=models.BooleanField(default=False),
        ),
    ]
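The removed migration above adds two booleans and a nullable ForeignKey to AgentHistory. Purely as an illustration of how those fields would typically be used from the ORM (import paths and variable names are assumptions, not code from this repository):

from agents.models import AgentHistory
from core.models import CustomField

# history entries flagged for copy-back to the agent's notes
noted = AgentHistory.objects.filter(save_to_agent_note=True)

cf = CustomField.objects.first()
if cf is not None:
    # related_name="history" exposes the reverse relation on CustomField
    recent = cf.history.filter(collector_all_output=True).order_by("-id")[:10]

# on_delete=SET_NULL: deleting the CustomField keeps the history rows but
# nulls out custom_field (null=True makes that legal at the database level).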
@@ -1,51 +1,30 @@
 import asyncio
-import logging
 import re
 from collections import Counter
-from contextlib import suppress
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union, cast
+from distutils.version import LooseVersion
+from typing import Any, Optional, List, Dict, Union, Sequence, cast, TYPE_CHECKING
+from django.core.cache import cache
+from packaging import version as pyver
 
 import msgpack
 import nats
 import validators
+from asgiref.sync import sync_to_async
+from core.models import TZ_CHOICES
 from django.conf import settings
 from django.contrib.postgres.fields import ArrayField
-from django.core.cache import cache
 from django.db import models
 from django.utils import timezone as djangotime
+from logs.models import BaseAuditModel, DebugLog
 from nats.errors import TimeoutError
-from packaging import version as pyver
-from packaging.version import Version as LooseVersion
 
-from agents.utils import get_agent_url
-from checks.models import CheckResult
-from core.models import TZ_CHOICES
-from core.utils import _b64_to_hex, get_core_settings, send_command_with_mesh
-from logs.models import BaseAuditModel, DebugLog, PendingAction
-from tacticalrmm.constants import (
-    AGENT_STATUS_OFFLINE,
-    AGENT_STATUS_ONLINE,
-    AGENT_STATUS_OVERDUE,
-    AGENT_TBL_PEND_ACTION_CNT_CACHE_PREFIX,
-    ONLINE_AGENTS,
-    AgentHistoryType,
-    AgentMonType,
-    AgentPlat,
-    AlertSeverity,
-    CheckStatus,
-    CheckType,
-    CustomFieldType,
-    DebugLogType,
-    GoArch,
-    PAAction,
-    PAStatus,
-)
-from tacticalrmm.helpers import has_script_actions, has_webhook, setup_nats_options
+from core.utils import get_core_settings
 from tacticalrmm.models import PermissionQuerySet
+from tacticalrmm.constants import ONLINE_AGENTS
 
 if TYPE_CHECKING:
-    from alerts.models import Alert, AlertTemplate
     from automation.models import Policy
+    from alerts.models import AlertTemplate, Alert
     from autotasks.models import AutomatedTask
     from checks.models import Check
     from clients.models import Client
@@ -54,8 +33,6 @@ if TYPE_CHECKING:
 # type helpers
 Disk = Union[Dict[str, Any], str]
 
-logger = logging.getLogger("trmm")
-
 
 class Agent(BaseAuditModel):
     class Meta:
@@ -67,12 +44,9 @@ class Agent(BaseAuditModel):
 
     version = models.CharField(default="0.1.0", max_length=255)
     operating_system = models.CharField(null=True, blank=True, max_length=255)
-    plat: "AgentPlat" = models.CharField(  # type: ignore
-        max_length=255, choices=AgentPlat.choices, default=AgentPlat.WINDOWS
-    )
-    goarch: "GoArch" = models.CharField(  # type: ignore
-        max_length=255, choices=GoArch.choices, null=True, blank=True
-    )
+    plat = models.CharField(max_length=255, default="windows")
+    goarch = models.CharField(max_length=255, null=True, blank=True)
+    plat_release = models.CharField(max_length=255, null=True, blank=True)
     hostname = models.CharField(max_length=255)
     agent_id = models.CharField(max_length=200, unique=True)
     last_seen = models.DateTimeField(null=True, blank=True)
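The left side of this hunk types plat and goarch against choices enums imported from tacticalrmm.constants, while the right side falls back to plain CharFields with string defaults. For orientation, a TextChoices enum of that shape looks roughly like the sketch below; the platform values are taken from this diff, the goarch values are assumed Go architecture strings, and the real definitions in tacticalrmm/constants.py may differ.

# Illustrative sketch of the enum style the choices= variant relies on.
from django.db import models


class AgentPlat(models.TextChoices):
    WINDOWS = "windows", "Windows"
    LINUX = "linux", "Linux"
    DARWIN = "darwin", "Darwin"


class GoArch(models.TextChoices):
    # assumed values mirroring Go's GOARCH naming
    AMD64 = "amd64", "amd64"
    I386 = "386", "386"
    ARM64 = "arm64", "arm64"
    ARM32 = "arm", "arm"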
@@ -83,9 +57,7 @@ class Agent(BaseAuditModel):
     boot_time = models.FloatField(null=True, blank=True)
     logged_in_username = models.CharField(null=True, blank=True, max_length=255)
     last_logged_in_user = models.CharField(null=True, blank=True, max_length=255)
-    monitoring_type = models.CharField(
-        max_length=30, choices=AgentMonType.choices, default=AgentMonType.SERVER
-    )
+    monitoring_type = models.CharField(max_length=30)
     description = models.CharField(null=True, blank=True, max_length=255)
     mesh_node_id = models.CharField(null=True, blank=True, max_length=255)
     overdue_email_alert = models.BooleanField(default=False)
@@ -126,27 +98,6 @@ class Agent(BaseAuditModel):
     def __str__(self) -> str:
         return self.hostname
 
-    def save(self, *args, **kwargs):
-        # prevent recursion since calling set_alert_template() also calls save()
-        if not hasattr(self, "_processing_set_alert_template"):
-            self._processing_set_alert_template = False
-
-        if self.pk and not self._processing_set_alert_template:
-            orig = Agent.objects.get(pk=self.pk)
-            mon_type_changed = self.monitoring_type != orig.monitoring_type
-            site_changed = self.site_id != orig.site_id
-            policy_changed = self.policy != orig.policy
-            block_inherit = (
-                self.block_policy_inheritance != orig.block_policy_inheritance
-            )
-
-            if mon_type_changed or site_changed or policy_changed or block_inherit:
-                self._processing_set_alert_template = True
-                self.set_alert_template()
-                self._processing_set_alert_template = False
-
-        super().save(*args, **kwargs)
-
     @property
     def client(self) -> "Client":
         return self.site.client
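The save() override removed in this hunk guards against recursion: set_alert_template() saves the agent again, so a flag keeps the change-detection block from re-entering itself. A stripped-down sketch of the same pattern, with illustrative names that are not from this codebase:

# Assumed, simplified restatement of the recursion-guard pattern; meant to be
# subclassed by a concrete model.
from django.db import models


class GuardedModel(models.Model):
    name = models.CharField(max_length=255)

    class Meta:
        abstract = True

    def save(self, *args, **kwargs):
        if not hasattr(self, "_in_side_effect"):
            self._in_side_effect = False

        # only run change detection on updates, and never while the side
        # effect (which itself saves) is already in progress
        if self.pk and not self._in_side_effect:
            orig = type(self).objects.get(pk=self.pk)
            if orig.name != self.name:
                self._in_side_effect = True
                self.run_side_effect()  # may call save() again safely
                self._in_side_effect = False

        super().save(*args, **kwargs)

    def run_side_effect(self) -> None:
        # placeholder hook standing in for set_alert_template()
        self.save()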
@@ -156,14 +107,13 @@ class Agent(BaseAuditModel):
         # return the default timezone unless the timezone is explicity set per agent
         if self.time_zone:
             return self.time_zone
-
-        return get_core_settings().default_time_zone
+        else:
+            return get_core_settings().default_time_zone
 
     @property
     def is_posix(self) -> bool:
-        return self.plat in {AgentPlat.LINUX, AgentPlat.DARWIN}
+        return self.plat == "linux" or self.plat == "darwin"
 
-    # DEPRECATED, use goarch instead
     @property
     def arch(self) -> Optional[str]:
         if self.is_posix:
@@ -176,70 +126,41 @@ class Agent(BaseAuditModel):
                 return "32"
         return None
 
-    def do_update(self, *, token: str = "", force: bool = False) -> str:
-        ver = settings.LATEST_AGENT_VER
+    @property
+    def winagent_dl(self) -> Optional[str]:
+        if self.arch == "64":
+            return settings.DL_64
+        elif self.arch == "32":
+            return settings.DL_32
+        return None
 
-        if not self.goarch:
-            DebugLog.warning(
-                agent=self,
-                log_type=DebugLogType.AGENT_ISSUES,
-                message=f"Unable to determine arch on {self.hostname}({self.agent_id}). Skipping agent update.",
-            )
-            return "noarch"
+    @property
+    def win_inno_exe(self) -> Optional[str]:
+        if self.arch == "64":
+            return f"winagent-v{settings.LATEST_AGENT_VER}.exe"
+        elif self.arch == "32":
+            return f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe"
+        return None
 
-        if pyver.parse(self.version) <= pyver.parse("1.3.0"):
-            return "not supported"
-
-        url = get_agent_url(goarch=self.goarch, plat=self.plat, token=token)
-        bin = f"tacticalagent-v{ver}-{self.plat}-{self.goarch}.exe"
-
-        if not force:
-            if self.pendingactions.filter(  # type: ignore
-                action_type=PAAction.AGENT_UPDATE, status=PAStatus.PENDING
-            ).exists():
-                self.pendingactions.filter(  # type: ignore
-                    action_type=PAAction.AGENT_UPDATE, status=PAStatus.PENDING
-                ).delete()
-
-        PendingAction.objects.create(
-            agent=self,
-            action_type=PAAction.AGENT_UPDATE,
-            details={
-                "url": url,
-                "version": ver,
-                "inno": bin,
-            },
-        )
-
-        nats_data = {
-            "func": "agentupdate",
-            "payload": {
-                "url": url,
-                "version": ver,
-                "inno": bin,
-            },
-        }
-        asyncio.run(self.nats_cmd(nats_data, wait=False))
-        return "created"
-
     @property
     def status(self) -> str:
-        now = djangotime.now()
-        offline = now - djangotime.timedelta(minutes=self.offline_time)
-        overdue = now - djangotime.timedelta(minutes=self.overdue_time)
+        offline = djangotime.now() - djangotime.timedelta(minutes=self.offline_time)
+        overdue = djangotime.now() - djangotime.timedelta(minutes=self.overdue_time)
 
         if self.last_seen is not None:
             if (self.last_seen < offline) and (self.last_seen > overdue):
-                return AGENT_STATUS_OFFLINE
+                return "offline"
             elif (self.last_seen < offline) and (self.last_seen < overdue):
-                return AGENT_STATUS_OVERDUE
+                return "overdue"
             else:
-                return AGENT_STATUS_ONLINE
+                return "online"
         else:
-            return AGENT_STATUS_OFFLINE
+            return "offline"
 
     @property
     def checks(self) -> Dict[str, Any]:
+        from checks.models import CheckResult
+
         total, passing, failing, warning, info = 0, 0, 0, 0, 0
 
         for check in self.get_checks_with_policies(exclude_overridden=True):
@@ -247,29 +168,23 @@ class Agent(BaseAuditModel):
             if (
                 not hasattr(check.check_result, "status")
                 or isinstance(check.check_result, CheckResult)
-                and check.check_result.status == CheckStatus.PASSING
+                and check.check_result.status == "passing"
             ):
                 passing += 1
             elif (
                 isinstance(check.check_result, CheckResult)
-                and check.check_result.status == CheckStatus.FAILING
+                and check.check_result.status == "failing"
             ):
                 alert_severity = (
                     check.check_result.alert_severity
-                    if check.check_type
-                    in (
-                        CheckType.MEMORY,
-                        CheckType.CPU_LOAD,
-                        CheckType.DISK_SPACE,
-                        CheckType.SCRIPT,
-                    )
+                    if check.check_type in ["memory", "cpuload", "diskspace", "script"]
                     else check.alert_severity
                 )
-                if alert_severity == AlertSeverity.ERROR:
+                if alert_severity == "error":
                     failing += 1
-                elif alert_severity == AlertSeverity.WARNING:
+                elif alert_severity == "warning":
                     warning += 1
-                elif alert_severity == AlertSeverity.INFO:
+                elif alert_severity == "info":
                     info += 1
 
         ret = {
@@ -282,15 +197,6 @@ class Agent(BaseAuditModel):
         }
         return ret
 
-    @property
-    def pending_actions_count(self) -> int:
-        ret = cache.get(f"{AGENT_TBL_PEND_ACTION_CNT_CACHE_PREFIX}{self.pk}")
-        if ret is None:
-            ret = self.pendingactions.filter(status=PAStatus.PENDING).count()
-            cache.set(f"{AGENT_TBL_PEND_ACTION_CNT_CACHE_PREFIX}{self.pk}", ret, 600)
-
-        return ret
-
     @property
     def cpu_model(self) -> List[str]:
         if self.is_posix:
@@ -303,20 +209,7 @@ class Agent(BaseAuditModel):
         try:
             cpus = self.wmi_detail["cpu"]
             for cpu in cpus:
-                name = [x["Name"] for x in cpu if "Name" in x][0]
-                lp, nc = "", ""
-                with suppress(Exception):
-                    lp = [
-                        x["NumberOfLogicalProcessors"]
-                        for x in cpu
-                        if "NumberOfCores" in x
-                    ][0]
-                    nc = [x["NumberOfCores"] for x in cpu if "NumberOfCores" in x][0]
-                if lp and nc:
-                    cpu_string = f"{name}, {nc}C/{lp}T"
-                else:
-                    cpu_string = name
-                ret.append(cpu_string)
+                ret.append([x["Name"] for x in cpu if "Name" in x][0])
             return ret
         except:
             return ["unknown cpu model"]
@@ -380,7 +273,7 @@ class Agent(BaseAuditModel):
 
         if len(ret) == 1:
             return cast(str, ret[0])
-
-        return ", ".join(ret) if ret else "error getting local ips"
+        else:
+            return ", ".join(ret) if ret else "error getting local ips"
 
     @property
@@ -391,7 +284,7 @@ class Agent(BaseAuditModel):
         except:
             return "error getting make/model"
 
-        with suppress(Exception):
+        try:
             comp_sys = self.wmi_detail["comp_sys"][0]
             comp_sys_prod = self.wmi_detail["comp_sys_prod"][0]
             make = [x["Vendor"] for x in comp_sys_prod if "Vendor" in x][0]
@@ -408,10 +301,14 @@ class Agent(BaseAuditModel):
                 model = sysfam
 
             return f"{make} {model}"
+        except:
+            pass
 
-        with suppress(Exception):
+        try:
             comp_sys_prod = self.wmi_detail["comp_sys_prod"][0]
             return cast(str, [x["Version"] for x in comp_sys_prod if "Version" in x][0])
+        except:
+            pass
 
         return "unknown make/model"
 
@@ -444,23 +341,6 @@ class Agent(BaseAuditModel):
         except:
             return ["unknown disk"]
 
-    @property
-    def serial_number(self) -> str:
-        if self.is_posix:
-            try:
-                return self.wmi_detail["serialnumber"]
-            except:
-                return ""
-
-        try:
-            return self.wmi_detail["bios"][0][0]["SerialNumber"]
-        except:
-            return ""
-
-    @property
-    def hex_mesh_node_id(self) -> str:
-        return _b64_to_hex(self.mesh_node_id)
-
     @classmethod
     def online_agents(cls, min_version: str = "") -> "List[Agent]":
         if min_version:
@@ -468,14 +348,10 @@ class Agent(BaseAuditModel):
                 i
                 for i in cls.objects.only(*ONLINE_AGENTS)
                 if pyver.parse(i.version) >= pyver.parse(min_version)
-                and i.status == AGENT_STATUS_ONLINE
+                and i.status == "online"
             ]
 
-        return [
-            i
-            for i in cls.objects.only(*ONLINE_AGENTS)
-            if i.status == AGENT_STATUS_ONLINE
-        ]
+        return [i for i in cls.objects.only(*ONLINE_AGENTS) if i.status == "online"]
 
     def is_supported_script(self, platforms: List[str]) -> bool:
         return self.plat.lower() in platforms if platforms else True
@@ -483,6 +359,7 @@ class Agent(BaseAuditModel):
     def get_checks_with_policies(
         self, exclude_overridden: bool = False
     ) -> "List[Check]":
+
         if exclude_overridden:
             checks = (
                 list(
@@ -497,10 +374,12 @@ class Agent(BaseAuditModel):
         return self.add_check_results(checks)
 
     def get_tasks_with_policies(self) -> "List[AutomatedTask]":
+
         tasks = list(self.autotasks.all()) + self.get_tasks_from_policies()
         return self.add_task_results(tasks)
 
     def add_task_results(self, tasks: "List[AutomatedTask]") -> "List[AutomatedTask]":
+
         results = self.taskresults.all()  # type: ignore
 
         for task in tasks:
@@ -512,6 +391,7 @@ class Agent(BaseAuditModel):
         return tasks
 
     def add_check_results(self, checks: "List[Check]") -> "List[Check]":
+
         results = self.checkresults.all()  # type: ignore
 
         for check in checks:
@@ -535,7 +415,7 @@ class Agent(BaseAuditModel):
         models.prefetch_related_objects(
             [
                 policy
-                for policy in (self.policy, site_policy, client_policy, default_policy)
+                for policy in [self.policy, site_policy, client_policy, default_policy]
                 if policy
             ],
             "excluded_agents",
@@ -548,32 +428,24 @@ class Agent(BaseAuditModel):
         )
 
         return {
-            "agent_policy": (
-                self.policy
-                if self.policy and not self.policy.is_agent_excluded(self)
-                else None
-            ),
-            "site_policy": (
-                site_policy
-                if (site_policy and not site_policy.is_agent_excluded(self))
-                and not self.block_policy_inheritance
-                else None
-            ),
-            "client_policy": (
-                client_policy
-                if (client_policy and not client_policy.is_agent_excluded(self))
-                and not self.block_policy_inheritance
-                and not self.site.block_policy_inheritance
-                else None
-            ),
-            "default_policy": (
-                default_policy
-                if (default_policy and not default_policy.is_agent_excluded(self))
-                and not self.block_policy_inheritance
-                and not self.site.block_policy_inheritance
-                and not self.client.block_policy_inheritance
-                else None
-            ),
+            "agent_policy": self.policy
+            if self.policy and not self.policy.is_agent_excluded(self)
+            else None,
+            "site_policy": site_policy
+            if (site_policy and not site_policy.is_agent_excluded(self))
+            and not self.block_policy_inheritance
+            else None,
+            "client_policy": client_policy
+            if (client_policy and not client_policy.is_agent_excluded(self))
+            and not self.block_policy_inheritance
+            and not self.site.block_policy_inheritance
+            else None,
+            "default_policy": default_policy
+            if (default_policy and not default_policy.is_agent_excluded(self))
+            and not self.block_policy_inheritance
+            and not self.site.block_policy_inheritance
+            and not self.client.block_policy_inheritance
+            else None,
         }
 
     def check_run_interval(self) -> int:
@@ -581,6 +453,7 @@ class Agent(BaseAuditModel):
         # determine if any agent checks have a custom interval and set the lowest interval
         for check in self.get_checks_with_policies():
             if check.run_interval and check.run_interval < interval:
+
                 # don't allow check runs less than 15s
                 interval = 15 if check.run_interval < 15 else check.run_interval
 
@@ -595,19 +468,13 @@ class Agent(BaseAuditModel):
         wait: bool = False,
         run_on_any: bool = False,
         history_pk: int = 0,
-        run_as_user: bool = False,
-        env_vars: list[str] = [],
     ) -> Any:
+
         from scripts.models import Script
 
         script = Script.objects.get(pk=scriptpk)
 
-        # always override if set on script model
-        if script.run_as_user:
-            run_as_user = True
-
         parsed_args = script.parse_script_args(self, script.shell, args)
-        parsed_env_vars = script.parse_script_env_vars(self, script.shell, env_vars)
 
         data = {
             "func": "runscriptfull" if full else "runscript",
@@ -617,10 +484,6 @@ class Agent(BaseAuditModel):
                 "code": script.code,
                 "shell": script.shell,
             },
-            "run_as_user": run_as_user,
-            "env_vars": parsed_env_vars,
-            "nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
-            "deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
         }
 
         if history_pk != 0:
@@ -656,7 +519,7 @@ class Agent(BaseAuditModel):
     def approve_updates(self) -> None:
         patch_policy = self.get_patch_policy()
 
-        severity_list = []
+        severity_list = list()
        if patch_policy.critical == "approve":
            severity_list.append("Critical")
 
@@ -688,14 +551,17 @@ class Agent(BaseAuditModel):
         if not agent_policy:
             agent_policy = WinUpdatePolicy.objects.create(agent=self)
 
-        # Get the list of policies applied to the agent and select the
-        # highest priority one.
         policies = self.get_agent_policies()
 
+        processed_policies: List[int] = list()
         for _, policy in policies.items():
-            if policy and policy.active and policy.winupdatepolicy.exists():
+            if (
+                policy
+                and policy.active
+                and policy.pk not in processed_policies
+                and policy.winupdatepolicy.exists()
+            ):
                 patch_policy = policy.winupdatepolicy.first()
-                break
 
         # if policy still doesn't exist return the agent patch policy
         if not patch_policy:
@@ -747,7 +613,7 @@ class Agent(BaseAuditModel):
         policies = self.get_agent_policies()
 
         # loop through all policies applied to agent and return an alert_template if found
-        processed_policies: List[int] = []
+        processed_policies: List[int] = list()
         for key, policy in policies.items():
             # default alert_template will override a default policy with alert template applied
             if (
@@ -812,10 +678,10 @@ class Agent(BaseAuditModel):
             cache_key = f"agent_{self.agent_id}_checks"
 
         elif self.policy:
-            cache_key = f"site_{self.monitoring_type}_{self.plat}_{self.site_id}_policy_{self.policy_id}_checks"
+            cache_key = f"site_{self.monitoring_type}_{self.site_id}_policy_{self.policy_id}_checks"
 
         else:
-            cache_key = f"site_{self.monitoring_type}_{self.plat}_{self.site_id}_checks"
+            cache_key = f"site_{self.monitoring_type}_{self.site_id}_checks"
 
         cached_checks = cache.get(cache_key)
         if isinstance(cached_checks, list):
@@ -837,10 +703,10 @@ class Agent(BaseAuditModel):
             cache_key = f"agent_{self.agent_id}_tasks"
 
         elif self.policy:
-            cache_key = f"site_{self.monitoring_type}_{self.plat}_{self.site_id}_policy_{self.policy_id}_tasks"
+            cache_key = f"site_{self.monitoring_type}_{self.site_id}_policy_{self.policy_id}_tasks"
 
         else:
-            cache_key = f"site_{self.monitoring_type}_{self.plat}_{self.site_id}_tasks"
+            cache_key = f"site_{self.monitoring_type}_{self.site_id}_tasks"
 
         cached_tasks = cache.get(cache_key)
         if isinstance(cached_tasks, list):
@@ -848,15 +714,25 @@ class Agent(BaseAuditModel):
         else:
             # get agent tasks based on policies
             tasks = Policy.get_policy_tasks(self)
-            cache.set(cache_key, tasks, 600)
+            cache.set(f"site_{self.site_id}_tasks", tasks, 600)
             return tasks
 
+    def _do_nats_debug(self, agent: "Agent", message: str) -> None:
+        DebugLog.error(agent=agent, log_type="agent_issues", message=message)
+
     async def nats_cmd(
         self, data: Dict[Any, Any], timeout: int = 30, wait: bool = True
     ) -> Any:
-        opts = setup_nats_options()
+        options = {
+            "servers": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
+            "user": "tacticalrmm",
+            "password": settings.SECRET_KEY,
+            "connect_timeout": 3,
+            "max_reconnect_attempts": 2,
+        }
+
         try:
-            nc = await nats.connect(**opts)
+            nc = await nats.connect(**options)
         except:
             return "natsdown"
 
@@ -872,7 +748,9 @@ class Agent(BaseAuditModel):
                 ret = msgpack.loads(msg.data)
             except Exception as e:
                 ret = str(e)
-                logger.error(e)
+                await sync_to_async(self._do_nats_debug, thread_sensitive=False)(
+                    agent=self, message=ret
+                )
 
             await nc.close()
             return ret
@@ -881,41 +759,6 @@ class Agent(BaseAuditModel):
             await nc.flush()
             await nc.close()
 
-    def recover(self, mode: str, mesh_uri: str, wait: bool = True) -> tuple[str, bool]:
-        """
-        Return type: tuple(message: str, error: bool)
-        """
-        if mode == "tacagent":
-            if self.plat == AgentPlat.LINUX:
-                cmd = "systemctl restart tacticalagent.service"
-                shell = 3
-            elif self.plat == AgentPlat.DARWIN:
-                cmd = "launchctl kickstart -k system/tacticalagent"
-                shell = 3
-            else:
-                cmd = "net stop tacticalrmm & taskkill /F /IM tacticalrmm.exe & net start tacticalrmm"
-                shell = 1
-
-            asyncio.run(
-                send_command_with_mesh(cmd, mesh_uri, self.mesh_node_id, shell, 0)
-            )
-            return "ok", False
-
-        elif mode == "mesh":
-            data = {"func": "recover", "payload": {"mode": mode}}
-            if wait:
-                r = asyncio.run(self.nats_cmd(data, timeout=20))
-                if r == "ok":
-                    return "ok", False
-                else:
-                    return str(r), True
-            else:
-                asyncio.run(self.nats_cmd(data, timeout=20, wait=False))
-
-            return "ok", False
-
-        return "invalid", True
-
     @staticmethod
     def serialize(agent: "Agent") -> Dict[str, Any]:
         # serializes the agent and returns json
@@ -924,7 +767,7 @@ class Agent(BaseAuditModel):
         return AgentAuditSerializer(agent).data
 
     def delete_superseded_updates(self) -> None:
-        with suppress(Exception):
+        try:
             pks = []  # list of pks to delete
             kbs = list(self.winupdates.values_list("kb", flat=True))
             d = Counter(kbs)
@@ -935,10 +778,8 @@ class Agent(BaseAuditModel):
                 # extract the version from the title and sort from oldest to newest
                 # skip if no version info is available therefore nothing to parse
                 try:
-                    matches = r"(Version|Versão)"
-                    pattern = r"\(" + matches + r"(.*?)\)"
                     vers = [
-                        re.search(pattern, i, flags=re.IGNORECASE).group(2).strip()
+                        re.search(r"\(Version(.*?)\)", i).group(1).strip()
                         for i in titles
                     ]
                     sorted_vers = sorted(vers, key=LooseVersion)
@@ -951,26 +792,24 @@ class Agent(BaseAuditModel):
 
             pks = list(set(pks))
             self.winupdates.filter(pk__in=pks).delete()
+        except:
+            pass
 
     def should_create_alert(
         self, alert_template: "Optional[AlertTemplate]" = None
     ) -> bool:
-        has_agent_notification = (
+        return bool(
             self.overdue_dashboard_alert
             or self.overdue_email_alert
             or self.overdue_text_alert
-        )
-        has_alert_template_notification = alert_template and (
-            alert_template.agent_always_alert
-            or alert_template.agent_always_email
-            or alert_template.agent_always_text
-        )
-        return bool(
-            has_agent_notification
-            or has_alert_template_notification
-            or has_webhook(alert_template, "agent")
-            or has_script_actions(alert_template, "agent")
+            or (
+                alert_template
+                and (
+                    alert_template.agent_always_alert
+                    or alert_template.agent_always_email
+                    or alert_template.agent_always_text
+                )
+            )
         )
 
     def send_outage_email(self) -> None:
@@ -1064,54 +903,60 @@ class AgentCustomField(models.Model):
         default=list,
     )
 
-    class Meta:
-        unique_together = (("agent", "field"),)
-
     def __str__(self) -> str:
         return self.field.name
 
     @property
     def value(self) -> Union[List[Any], bool, str]:
-        if self.field.type == CustomFieldType.MULTIPLE:
+        if self.field.type == "multiple":
             return cast(List[str], self.multiple_value)
-        elif self.field.type == CustomFieldType.CHECKBOX:
+        elif self.field.type == "checkbox":
             return self.bool_value
-
-        return cast(str, self.string_value)
+        else:
+            return cast(str, self.string_value)
 
     def save_to_field(self, value: Union[List[Any], bool, str]) -> None:
-        if self.field.type in (
-            CustomFieldType.TEXT,
-            CustomFieldType.NUMBER,
-            CustomFieldType.SINGLE,
-            CustomFieldType.DATETIME,
-        ):
+        if self.field.type in [
+            "text",
+            "number",
+            "single",
+            "datetime",
+        ]:
             self.string_value = cast(str, value)
             self.save()
-        elif self.field.type == CustomFieldType.MULTIPLE:
+        elif self.field.type == "multiple":
             self.multiple_value = value.split(",")
             self.save()
-        elif self.field.type == CustomFieldType.CHECKBOX:
+        elif self.field.type == "checkbox":
             self.bool_value = bool(value)
             self.save()
 
 
+AGENT_HISTORY_TYPES = (
+    ("task_run", "Task Run"),
+    ("script_run", "Script Run"),
+    ("cmd_run", "CMD Run"),
+)
+
+AGENT_HISTORY_STATUS = (("success", "Success"), ("failure", "Failure"))
+
+
 class AgentHistory(models.Model):
     objects = PermissionQuerySet.as_manager()
 
-    id = models.BigAutoField(primary_key=True)
     agent = models.ForeignKey(
         Agent,
         related_name="history",
         on_delete=models.CASCADE,
     )
     time = models.DateTimeField(auto_now_add=True)
-    type: "AgentHistoryType" = models.CharField(
-        max_length=50,
-        choices=AgentHistoryType.choices,
-        default=AgentHistoryType.CMD_RUN,
+    type = models.CharField(
+        max_length=50, choices=AGENT_HISTORY_TYPES, default="cmd_run"
     )
     command = models.TextField(null=True, blank=True, default="")
+    status = models.CharField(
+        max_length=50, choices=AGENT_HISTORY_STATUS, default="success"
+    )
     username = models.CharField(max_length=255, default="system")
     results = models.TextField(null=True, blank=True)
     script = models.ForeignKey(
@@ -1122,15 +967,6 @@ class AgentHistory(models.Model):
         on_delete=models.SET_NULL,
     )
     script_results = models.JSONField(null=True, blank=True)
-    custom_field = models.ForeignKey(
-        "core.CustomField",
-        null=True,
-        blank=True,
-        related_name="history",
-        on_delete=models.SET_NULL,
-    )
-    collector_all_output = models.BooleanField(default=False)
-    save_to_agent_note = models.BooleanField(default=False)
 
     def __str__(self) -> str:
         return f"{self.agent.hostname} - {self.type}"
@@ -27,9 +27,6 @@ class AgentPerms(permissions.BasePermission):
 
 class RecoverAgentPerms(permissions.BasePermission):
     def has_permission(self, r, view) -> bool:
-        if "agent_id" not in view.kwargs.keys():
-            return _has_perm(r, "can_recover_agents")
-
         return _has_perm(r, "can_recover_agents") and _has_perm_on_agent(
             r.user, view.kwargs["agent_id"]
         )
@@ -47,6 +44,13 @@ class UpdateAgentPerms(permissions.BasePermission):
         return _has_perm(r, "can_update_agents")
 
 
+class PingAgentPerms(permissions.BasePermission):
+    def has_permission(self, r, view) -> bool:
+        return _has_perm(r, "can_ping_agents") and _has_perm_on_agent(
+            r.user, view.kwargs["agent_id"]
+        )
+
+
 class ManageProcPerms(permissions.BasePermission):
     def has_permission(self, r, view) -> bool:
         return _has_perm(r, "can_manage_procs") and _has_perm_on_agent(
@@ -89,8 +93,10 @@ class RunScriptPerms(permissions.BasePermission):
 
 class AgentNotesPerms(permissions.BasePermission):
     def has_permission(self, r, view) -> bool:
+
         # permissions for GET /agents/notes/ endpoint
         if r.method == "GET":
+
             # permissions for /agents/<agent_id>/notes endpoint
             if "agent_id" in view.kwargs.keys():
                 return _has_perm(r, "can_list_notes") and _has_perm_on_agent(
@@ -113,15 +119,5 @@ class AgentHistoryPerms(permissions.BasePermission):
             return _has_perm(r, "can_list_agent_history") and _has_perm_on_agent(
                 r.user, view.kwargs["agent_id"]
             )
-
+        else:
             return _has_perm(r, "can_list_agent_history")
-
-
-class AgentWOLPerms(permissions.BasePermission):
-    def has_permission(self, r, view) -> bool:
-        if "agent_id" in view.kwargs.keys():
-            return _has_perm(r, "can_send_wol") and _has_perm_on_agent(
-                r.user, view.kwargs["agent_id"]
-            )
-
-        return _has_perm(r, "can_send_wol")
@@ -1,6 +1,5 @@
+import pytz
 from rest_framework import serializers
 
-from tacticalrmm.constants import AGENT_STATUS_ONLINE, ALL_TIMEZONES
 from winupdate.serializers import WinUpdatePolicySerializer
-
 from .models import Agent, AgentCustomField, AgentHistory, Note
@@ -70,7 +69,7 @@ class AgentSerializer(serializers.ModelSerializer):
         return policies
 
     def get_all_timezones(self, obj):
-        return ALL_TIMEZONES
+        return pytz.all_timezones
 
     class Meta:
         model = Agent
@@ -89,18 +88,12 @@ class AgentTableSerializer(serializers.ModelSerializer):
     last_seen = serializers.ReadOnlyField()
     pending_actions_count = serializers.ReadOnlyField()
     has_patches_pending = serializers.ReadOnlyField()
-    cpu_model = serializers.ReadOnlyField()
-    graphics = serializers.ReadOnlyField()
-    local_ips = serializers.ReadOnlyField()
-    make_model = serializers.ReadOnlyField()
-    physical_disks = serializers.ReadOnlyField()
-    serial_number = serializers.ReadOnlyField()
-    custom_fields = AgentCustomFieldSerializer(many=True, read_only=True)
 
     def get_alert_template(self, obj):
 
         if not obj.alert_template:
             return None
+        else:
             return {
                 "name": obj.alert_template.name,
                 "always_email": obj.alert_template.agent_always_email,
@@ -109,15 +102,15 @@ class AgentTableSerializer(serializers.ModelSerializer):
             }
 
     def get_logged_username(self, obj) -> str:
-        if obj.logged_in_username == "None" and obj.status == AGENT_STATUS_ONLINE:
+        if obj.logged_in_username == "None" and obj.status == "online":
             return obj.last_logged_in_user
         elif obj.logged_in_username != "None":
             return obj.logged_in_username
-
-        return "-"
+        else:
+            return "-"
 
     def get_italic(self, obj) -> bool:
-        return obj.logged_in_username == "None" and obj.status == AGENT_STATUS_ONLINE
+        return obj.logged_in_username == "None" and obj.status == "online"
 
     class Meta:
         model = Agent
@@ -146,20 +139,16 @@ class AgentTableSerializer(serializers.ModelSerializer):
             "plat",
             "goarch",
             "has_patches_pending",
-            "version",
-            "operating_system",
-            "public_ip",
-            "cpu_model",
-            "graphics",
-            "local_ips",
-            "make_model",
-            "physical_disks",
-            "custom_fields",
-            "serial_number",
         ]
         depth = 2
 
 
+class WinAgentSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = Agent
+        fields = "__all__"
+
+
 class AgentHostnameSerializer(serializers.ModelSerializer):
     client = serializers.ReadOnlyField(source="client.name")
     site = serializers.ReadOnlyField(source="site.name")
@@ -1,41 +1,107 @@
|
|||||||
|
import asyncio
|
||||||
import datetime as dt
|
import datetime as dt
|
||||||
|
import random
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from typing import TYPE_CHECKING, Optional
|
from typing import Optional
|
||||||
|
|
||||||
from django.core.management import call_command
|
|
||||||
from django.utils import timezone as djangotime
|
|
||||||
|
|
||||||
from agents.models import Agent
|
from agents.models import Agent
|
||||||
|
from agents.utils import get_agent_url
|
||||||
from core.utils import get_core_settings
|
from core.utils import get_core_settings
|
||||||
from logs.models import DebugLog
|
from django.conf import settings
|
||||||
|
from django.utils import timezone as djangotime
|
||||||
|
from logs.models import DebugLog, PendingAction
|
||||||
|
from packaging import version as pyver
|
||||||
from scripts.models import Script
|
from scripts.models import Script
|
||||||
from tacticalrmm.celery import app
|
|
||||||
from tacticalrmm.constants import (
|
|
||||||
AGENT_DEFER,
|
|
||||||
AGENT_OUTAGES_LOCK,
|
|
||||||
AGENT_STATUS_OVERDUE,
|
|
||||||
CheckStatus,
|
|
||||||
DebugLogType,
|
|
||||||
)
|
|
||||||
from tacticalrmm.helpers import rand_range
|
|
||||||
from tacticalrmm.utils import redis_lock
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
from tacticalrmm.celery import app
|
||||||
from django.db.models.query import QuerySet
|
|
||||||
|
|
||||||
|
def agent_update(agent_id: str, force: bool = False) -> str:
|
||||||
|
|
||||||
|
agent = Agent.objects.get(agent_id=agent_id)
|
||||||
|
|
||||||
|
if pyver.parse(agent.version) <= pyver.parse("1.3.0"):
|
||||||
|
return "not supported"
|
||||||
|
|
||||||
|
# skip if we can't determine the arch
|
||||||
|
if agent.arch is None:
|
||||||
|
DebugLog.warning(
|
||||||
|
agent=agent,
|
||||||
|
log_type="agent_issues",
|
||||||
|
message=f"Unable to determine arch on {agent.hostname}({agent.agent_id}). Skipping agent update.",
|
||||||
|
)
|
||||||
|
return "noarch"
|
||||||
|
|
||||||
|
version = settings.LATEST_AGENT_VER
|
||||||
|
inno = agent.win_inno_exe
|
||||||
|
url = get_agent_url(agent.arch, agent.plat)
|
||||||
|
|
||||||
|
if not force:
|
||||||
|
if agent.pendingactions.filter(
|
||||||
|
action_type="agentupdate", status="pending"
|
||||||
|
).exists():
|
||||||
|
agent.pendingactions.filter(
|
||||||
|
action_type="agentupdate", status="pending"
|
||||||
|
).delete()
|
||||||
|
|
||||||
|
PendingAction.objects.create(
|
||||||
|
agent=agent,
|
||||||
|
action_type="agentupdate",
|
||||||
|
details={
|
||||||
|
"url": url,
|
||||||
|
"version": version,
|
||||||
|
"inno": inno,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
nats_data = {
|
||||||
|
"func": "agentupdate",
|
||||||
|
"payload": {
|
||||||
|
"url": url,
|
||||||
|
"version": version,
|
||||||
|
"inno": inno,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||||
|
return "created"
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def send_agent_update_task(*, agent_ids: list[str], token: str, force: bool) -> None:
|
def force_code_sign(agent_ids: list[str]) -> None:
|
||||||
agents: "QuerySet[Agent]" = Agent.objects.defer(*AGENT_DEFER).filter(
|
chunks = (agent_ids[i : i + 50] for i in range(0, len(agent_ids), 50))
|
||||||
agent_id__in=agent_ids
|
for chunk in chunks:
|
||||||
)
|
for agent_id in chunk:
|
||||||
for agent in agents:
|
agent_update(agent_id=agent_id, force=True)
|
||||||
agent.do_update(token=token, force=force)
|
sleep(2)
|
||||||
|
|
||||||
|
|
||||||
|
@app.task
|
||||||
|
def send_agent_update_task(agent_ids: list[str]) -> None:
|
||||||
|
chunks = (agent_ids[i : i + 50] for i in range(0, len(agent_ids), 50))
|
||||||
|
for chunk in chunks:
|
||||||
|
for agent_id in chunk:
|
||||||
|
agent_update(agent_id)
|
||||||
|
sleep(2)
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def auto_self_agent_update_task() -> None:
|
def auto_self_agent_update_task() -> None:
|
||||||
call_command("update_agents")
|
core = get_core_settings()
|
||||||
|
if not core.agent_auto_update:
|
||||||
|
return
|
||||||
|
|
||||||
|
q = Agent.objects.only("agent_id", "version")
|
||||||
|
agent_ids: list[str] = [
|
||||||
|
i.agent_id
|
||||||
|
for i in q
|
||||||
|
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||||
|
]
|
||||||
|
|
||||||
|
chunks = (agent_ids[i : i + 30] for i in range(0, len(agent_ids), 30))
|
||||||
|
for chunk in chunks:
|
||||||
|
for agent_id in chunk:
|
||||||
|
agent_update(agent_id)
|
||||||
|
sleep(2)
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
@@ -48,7 +114,7 @@ def agent_outage_email_task(pk: int, alert_interval: Optional[float] = None) ->
         return "alert not found"
 
     if not alert.email_sent:
-        sleep(rand_range(100, 1500))
+        sleep(random.randint(1, 5))
         alert.agent.send_outage_email()
         alert.email_sent = djangotime.now()
         alert.save(update_fields=["email_sent"])
@@ -57,7 +123,7 @@ def agent_outage_email_task(pk: int, alert_interval: Optional[float] = None) ->
         # send an email only if the last email sent is older than alert interval
         delta = djangotime.now() - dt.timedelta(days=alert_interval)
         if alert.email_sent < delta:
-            sleep(rand_range(100, 1500))
+            sleep(random.randint(1, 5))
             alert.agent.send_outage_email()
             alert.email_sent = djangotime.now()
             alert.save(update_fields=["email_sent"])
@@ -69,7 +135,7 @@ def agent_outage_email_task(pk: int, alert_interval: Optional[float] = None) ->
 def agent_recovery_email_task(pk: int) -> str:
     from alerts.models import Alert
 
-    sleep(rand_range(100, 1500))
+    sleep(random.randint(1, 5))
 
     try:
         alert = Alert.objects.get(pk=pk)
@@ -93,7 +159,7 @@ def agent_outage_sms_task(pk: int, alert_interval: Optional[float] = None) -> st
         return "alert not found"
 
     if not alert.sms_sent:
-        sleep(rand_range(100, 1500))
+        sleep(random.randint(1, 3))
         alert.agent.send_outage_sms()
         alert.sms_sent = djangotime.now()
         alert.save(update_fields=["sms_sent"])
@@ -102,7 +168,7 @@ def agent_outage_sms_task(pk: int, alert_interval: Optional[float] = None) -> st
         # send an sms only if the last sms sent is older than alert interval
         delta = djangotime.now() - dt.timedelta(days=alert_interval)
         if alert.sms_sent < delta:
-            sleep(rand_range(100, 1500))
+            sleep(random.randint(1, 3))
             alert.agent.send_outage_sms()
             alert.sms_sent = djangotime.now()
             alert.save(update_fields=["sms_sent"])
@@ -114,7 +180,7 @@ def agent_outage_sms_task(pk: int, alert_interval: Optional[float] = None) -> st
 def agent_recovery_sms_task(pk: int) -> str:
     from alerts.models import Alert
 
-    sleep(rand_range(100, 1500))
+    sleep(random.randint(1, 3))
     try:
         alert = Alert.objects.get(pk=pk)
     except Alert.DoesNotExist:
@@ -127,21 +193,25 @@ def agent_recovery_sms_task(pk: int) -> str:
|
|||||||
return "ok"
|
return "ok"
|
||||||
|
|
||||||
|
|
||||||
@app.task(bind=True)
|
@app.task
|
||||||
def agent_outages_task(self) -> str:
|
def agent_outages_task() -> None:
|
||||||
with redis_lock(AGENT_OUTAGES_LOCK, self.app.oid) as acquired:
|
|
||||||
if not acquired:
|
|
||||||
return f"{self.app.oid} still running"
|
|
||||||
|
|
||||||
from alerts.models import Alert
|
from alerts.models import Alert
|
||||||
from core.tasks import _get_agent_qs
|
|
||||||
|
|
||||||
for agent in _get_agent_qs():
|
agents = Agent.objects.only(
|
||||||
if agent.status == AGENT_STATUS_OVERDUE:
|
"pk",
|
||||||
|
"agent_id",
|
||||||
|
"last_seen",
|
||||||
|
"offline_time",
|
||||||
|
"overdue_time",
|
||||||
|
"overdue_email_alert",
|
||||||
|
"overdue_text_alert",
|
||||||
|
"overdue_dashboard_alert",
|
||||||
|
)
|
||||||
|
|
||||||
|
for agent in agents:
|
||||||
|
if agent.status == "overdue":
|
||||||
Alert.handle_alert_failure(agent)
|
Alert.handle_alert_failure(agent)
|
||||||
|
|
||||||
return "completed"
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def run_script_email_results_task(
|
def run_script_email_results_task(
|
||||||
@@ -151,8 +221,6 @@ def run_script_email_results_task(
|
|||||||
emails: list[str],
|
emails: list[str],
|
||||||
args: list[str] = [],
|
args: list[str] = [],
|
||||||
history_pk: int = 0,
|
history_pk: int = 0,
|
||||||
run_as_user: bool = False,
|
|
||||||
env_vars: list[str] = [],
|
|
||||||
):
|
):
|
||||||
agent = Agent.objects.get(pk=agentpk)
|
agent = Agent.objects.get(pk=agentpk)
|
||||||
script = Script.objects.get(pk=scriptpk)
|
script = Script.objects.get(pk=scriptpk)
|
||||||
@@ -163,19 +231,17 @@ def run_script_email_results_task(
|
|||||||
timeout=nats_timeout,
|
timeout=nats_timeout,
|
||||||
wait=True,
|
wait=True,
|
||||||
history_pk=history_pk,
|
history_pk=history_pk,
|
||||||
run_as_user=run_as_user,
|
|
||||||
env_vars=env_vars,
|
|
||||||
)
|
)
|
||||||
if r == "timeout":
|
if r == "timeout":
|
||||||
DebugLog.error(
|
DebugLog.error(
|
||||||
agent=agent,
|
agent=agent,
|
||||||
log_type=DebugLogType.SCRIPTING,
|
log_type="scripting",
|
||||||
message=f"{agent.hostname}({agent.pk}) timed out running script.",
|
message=f"{agent.hostname}({agent.pk}) timed out running script.",
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
CORE = get_core_settings()
|
CORE = get_core_settings()
|
||||||
subject = f"{agent.client.name}, {agent.site.name}, {agent.hostname} {script.name} Results"
|
subject = f"{agent.hostname} {script.name} Results"
|
||||||
exec_time = "{:.4f}".format(r["execution_time"])
|
exec_time = "{:.4f}".format(r["execution_time"])
|
||||||
body = (
|
body = (
|
||||||
subject
|
subject
|
||||||
@@ -223,7 +289,7 @@ def clear_faults_task(older_than_days: int) -> None:
         for check in agent.get_checks_with_policies():
             # reset check status
             if check.check_result:
-                check.check_result.status = CheckStatus.PASSING
+                check.check_result.status = "passing"
                 check.check_result.save(update_fields=["status"])
             if check.alert.filter(agent=agent, resolved=False).exists():
                 alert = Alert.create_or_return_check_alert(check, agent=agent)
@@ -252,8 +318,3 @@ def prune_agent_history(older_than_days: int) -> str:
     ).delete()
 
     return "ok"
-
-
-@app.task
-def bulk_recover_agents_task() -> None:
-    call_command("bulk_restart_agents")
@@ -1,32 +1,28 @@
|
|||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
from itertools import cycle
|
from itertools import cycle
|
||||||
|
from unittest.mock import patch
|
||||||
|
import pytz
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
from unittest.mock import PropertyMock, patch
|
|
||||||
from zoneinfo import ZoneInfo
|
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
from django.test import modify_settings
|
||||||
from django.utils import timezone as djangotime
|
from django.utils import timezone as djangotime
|
||||||
|
from logs.models import PendingAction
|
||||||
from model_bakery import baker
|
from model_bakery import baker
|
||||||
|
from packaging import version as pyver
|
||||||
|
from winupdate.models import WinUpdatePolicy
|
||||||
|
from winupdate.serializers import WinUpdatePolicySerializer
|
||||||
|
|
||||||
from agents.models import Agent, AgentCustomField, AgentHistory, Note
|
from tacticalrmm.test import TacticalTestCase
|
||||||
from agents.serializers import (
|
|
||||||
|
from .models import Agent, AgentCustomField, AgentHistory, Note
|
||||||
|
from .serializers import (
|
||||||
AgentHistorySerializer,
|
AgentHistorySerializer,
|
||||||
AgentHostnameSerializer,
|
AgentHostnameSerializer,
|
||||||
AgentNoteSerializer,
|
AgentNoteSerializer,
|
||||||
AgentSerializer,
|
AgentSerializer,
|
||||||
)
|
)
|
||||||
from tacticalrmm.constants import (
|
from .tasks import auto_self_agent_update_task
|
||||||
AGENT_STATUS_OFFLINE,
|
|
||||||
AGENT_STATUS_ONLINE,
|
|
||||||
AgentMonType,
|
|
||||||
CustomFieldModel,
|
|
||||||
CustomFieldType,
|
|
||||||
EvtLogNames,
|
|
||||||
)
|
|
||||||
from tacticalrmm.test import TacticalTestCase
|
|
||||||
from winupdate.models import WinUpdatePolicy
|
|
||||||
from winupdate.serializers import WinUpdatePolicySerializer
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from clients.models import Client, Site
|
from clients.models import Client, Site
|
||||||
@@ -34,6 +30,11 @@ if TYPE_CHECKING:
|
|||||||
base_url = "/agents"
|
base_url = "/agents"
|
||||||
|
|
||||||
|
|
||||||
|
@modify_settings(
|
||||||
|
MIDDLEWARE={
|
||||||
|
"remove": "tacticalrmm.middleware.LinuxMiddleware",
|
||||||
|
}
|
||||||
|
)
|
||||||
class TestAgentsList(TacticalTestCase):
|
class TestAgentsList(TacticalTestCase):
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
self.authenticate()
|
self.authenticate()
|
||||||
@@ -50,27 +51,24 @@ class TestAgentsList(TacticalTestCase):
|
|||||||
site3: "Site" = baker.make("clients.Site", client=company2)
|
site3: "Site" = baker.make("clients.Site", client=company2)
|
||||||
|
|
||||||
baker.make_recipe(
|
baker.make_recipe(
|
||||||
"agents.online_agent",
|
"agents.online_agent", site=site1, monitoring_type="server", _quantity=15
|
||||||
site=site1,
|
|
||||||
monitoring_type=AgentMonType.SERVER,
|
|
||||||
_quantity=15,
|
|
||||||
)
|
)
|
||||||
baker.make_recipe(
|
baker.make_recipe(
|
||||||
"agents.online_agent",
|
"agents.online_agent",
|
||||||
site=site2,
|
site=site2,
|
||||||
monitoring_type=AgentMonType.WORKSTATION,
|
monitoring_type="workstation",
|
||||||
_quantity=10,
|
_quantity=10,
|
||||||
)
|
)
|
||||||
baker.make_recipe(
|
baker.make_recipe(
|
||||||
"agents.online_agent",
|
"agents.online_agent",
|
||||||
site=site3,
|
site=site3,
|
||||||
monitoring_type=AgentMonType.SERVER,
|
monitoring_type="server",
|
||||||
_quantity=4,
|
_quantity=4,
|
||||||
)
|
)
|
||||||
baker.make_recipe(
|
baker.make_recipe(
|
||||||
"agents.online_agent",
|
"agents.online_agent",
|
||||||
site=site3,
|
site=site3,
|
||||||
monitoring_type=AgentMonType.WORKSTATION,
|
monitoring_type="workstation",
|
||||||
_quantity=7,
|
_quantity=7,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -102,6 +100,11 @@ class TestAgentsList(TacticalTestCase):
         self.check_not_authenticated("get", url)
 
 
+@modify_settings(
+    MIDDLEWARE={
+        "remove": "tacticalrmm.middleware.LinuxMiddleware",
+    }
+)
 class TestAgentViews(TacticalTestCase):
     def setUp(self):
         self.authenticate()
@@ -114,17 +117,6 @@ class TestAgentViews(TacticalTestCase):
         )
         baker.make_recipe("winupdate.winupdate_policy", agent=self.agent)
 
-    @patch("agents.tasks.bulk_recover_agents_task.delay")
-    def test_bulk_agent_recovery(self, mock_task):
-        mock_task.return_value = None
-        url = f"{base_url}/bulkrecovery/"
-
-        r = self.client.get(url)
-        self.assertEqual(r.status_code, 200)
-        mock_task.assert_called_once()
-
-        self.check_not_authenticated("get", url)
-
     def test_get_agent(self):
         url = f"{base_url}/{self.agent.agent_id}/"
 
@@ -178,11 +170,7 @@ class TestAgentViews(TacticalTestCase):
         self.assertEqual(data["run_time_days"], [2, 3, 6])
 
         # test adding custom fields
-        field = baker.make(
-            "core.CustomField",
-            model=CustomFieldModel.AGENT,
-            type=CustomFieldType.NUMBER,
-        )
+        field = baker.make("core.CustomField", model="agent", type="number")
         data = {
             "site": site.pk,
             "description": "asjdk234andasd",
@@ -239,7 +227,7 @@ class TestAgentViews(TacticalTestCase):
         self.agent.save(update_fields=["policy"])
         _ = self.agent.get_patch_policy()
 
-        self.agent.monitoring_type = AgentMonType.WORKSTATION
+        self.agent.monitoring_type = "workstation"
         self.agent.save(update_fields=["monitoring_type"])
         _ = self.agent.get_patch_policy()
 
@@ -251,21 +239,52 @@ class TestAgentViews(TacticalTestCase):
         self.coresettings.save(update_fields=["server_policy", "workstation_policy"])
         _ = self.agent.get_patch_policy()
 
-        self.agent.monitoring_type = AgentMonType.SERVER
+        self.agent.monitoring_type = "server"
         self.agent.save(update_fields=["monitoring_type"])
         _ = self.agent.get_patch_policy()
 
     def test_get_agent_versions(self):
         url = "/agents/versions/"
 
-        with self.assertNumQueries(1):
         r = self.client.get(url)
 
         self.assertEqual(r.status_code, 200)
         assert any(i["hostname"] == self.agent.hostname for i in r.json()["agents"])
 
         self.check_not_authenticated("get", url)
 
+    @patch("agents.tasks.send_agent_update_task.delay")
+    def test_update_agents(self, mock_task):
+        url = f"{base_url}/update/"
+        baker.make_recipe(
+            "agents.agent",
+            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
+            version=settings.LATEST_AGENT_VER,
+            _quantity=15,
+        )
+        baker.make_recipe(
+            "agents.agent",
+            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
+            version="1.3.0",
+            _quantity=15,
+        )
+
+        agent_ids: list[str] = list(
+            Agent.objects.only("agent_id", "version").values_list("agent_id", flat=True)
+        )
+
+        data = {"agent_ids": agent_ids}
+        expected: list[str] = [
+            i.agent_id
+            for i in Agent.objects.only("agent_id", "version")
+            if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
+        ]
+
+        r = self.client.post(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        mock_task.assert_called_with(agent_ids=expected)
+
+        self.check_not_authenticated("post", url)
+
     @patch("time.sleep", return_value=None)
     @patch("agents.models.Agent.nats_cmd")
     def test_agent_ping(self, nats_cmd, mock_sleep):
@@ -274,25 +293,25 @@ class TestAgentViews(TacticalTestCase):
         nats_cmd.return_value = "timeout"
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        ret = {"name": self.agent.hostname, "status": AGENT_STATUS_OFFLINE}
+        ret = {"name": self.agent.hostname, "status": "offline"}
         self.assertEqual(r.json(), ret)
 
         nats_cmd.return_value = "natsdown"
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        ret = {"name": self.agent.hostname, "status": AGENT_STATUS_OFFLINE}
+        ret = {"name": self.agent.hostname, "status": "offline"}
         self.assertEqual(r.json(), ret)
 
         nats_cmd.return_value = "pong"
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        ret = {"name": self.agent.hostname, "status": AGENT_STATUS_ONLINE}
+        ret = {"name": self.agent.hostname, "status": "online"}
         self.assertEqual(r.json(), ret)
 
         nats_cmd.return_value = "asdasjdaksdasd"
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        ret = {"name": self.agent.hostname, "status": AGENT_STATUS_OFFLINE}
+        ret = {"name": self.agent.hostname, "status": "offline"}
         self.assertEqual(r.json(), ret)
 
         self.check_not_authenticated("get", url)
@@ -352,7 +371,7 @@ class TestAgentViews(TacticalTestCase):
                 "func": "eventlog",
                 "timeout": 30,
                 "payload": {
-                    "logname": EvtLogNames.APPLICATION,
+                    "logname": "Application",
                     "days": str(22),
                 },
             },
@@ -367,7 +386,7 @@ class TestAgentViews(TacticalTestCase):
                 "func": "eventlog",
                 "timeout": 180,
                 "payload": {
-                    "logname": EvtLogNames.SECURITY,
+                    "logname": "Security",
                     "days": str(6),
                 },
             },
@@ -403,7 +422,6 @@ class TestAgentViews(TacticalTestCase):
             "cmd": "ipconfig",
             "shell": "cmd",
             "timeout": 30,
-            "run_as_user": False,
         }
         mock_ret.return_value = "nt authority\\system"
         r = self.client.post(url, data, format="json")
@@ -418,20 +436,16 @@ class TestAgentViews(TacticalTestCase):
 
     @patch("agents.models.Agent.nats_cmd")
     def test_reboot_later(self, nats_cmd):
-        nats_cmd.return_value = "ok"
         url = f"{base_url}/{self.agent.agent_id}/reboot/"
 
-        # ensure we don't allow dates in past
-        data = {"datetime": "2022-07-11T01:51"}
-        r = self.client.patch(url, data, format="json")
-        self.assertEqual(r.status_code, 400)
-        self.assertEqual(r.data, "Date cannot be set in the past")
+        data = {
+            "datetime": "2025-08-29T18:41:02",
+        }
 
-        # test with date in future
-        data["datetime"] = "2027-08-29T18:41"
+        nats_cmd.return_value = "ok"
         r = self.client.patch(url, data, format="json")
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data["time"], "August 29, 2027 at 06:41 PM")
+        self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")
         self.assertEqual(r.data["agent"], self.agent.hostname)
 
         nats_data = {
@@ -444,12 +458,12 @@ class TestAgentViews(TacticalTestCase):
                 "multiple_instances": 2,
                 "trigger": "runonce",
                 "name": r.data["task_name"],
-                "start_year": 2027,
+                "start_year": 2025,
                 "start_month": 8,
                 "start_day": 29,
                 "start_hour": 18,
                 "start_min": 41,
-                "expire_year": 2027,
+                "expire_year": 2025,
                 "expire_month": 8,
                 "expire_day": 29,
                 "expire_hour": 18,
@@ -472,6 +486,42 @@ class TestAgentViews(TacticalTestCase):
 
         self.check_not_authenticated("patch", url)
 
+    def test_install_agent(self):
+        url = f"{base_url}/installer/"
+
+        site = baker.make("clients.Site")
+        data = {
+            "client": site.client.pk,
+            "site": site.pk,
+            "arch": "64",
+            "expires": 23,
+            "installMethod": "manual",
+            "api": "https://api.example.com",
+            "agenttype": "server",
+            "rdp": 1,
+            "ping": 0,
+            "power": 0,
+            "fileName": "rmm-client-site-server.exe",
+        }
+
+        r = self.client.post(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        data["arch"] = "64"
+        r = self.client.post(url, data, format="json")
+        self.assertIn("rdp", r.json()["cmd"])
+        self.assertNotIn("power", r.json()["cmd"])
+
+        data.update({"ping": 1, "power": 1})
+        r = self.client.post(url, data, format="json")
+        self.assertIn("power", r.json()["cmd"])
+        self.assertIn("ping", r.json()["cmd"])
+
+        data["installMethod"] = "powershell"
+        self.assertEqual(r.status_code, 200)
+
+        self.check_not_authenticated("post", url)
+
     @patch("meshctrl.utils.get_login_token")
     def test_meshcentral_tabs(self, mock_token):
         url = f"{base_url}/{self.agent.agent_id}/meshcentral/"
@@ -526,9 +576,10 @@ class TestAgentViews(TacticalTestCase):
     @patch("agents.tasks.run_script_email_results_task.delay")
     @patch("agents.models.Agent.run_script")
     def test_run_script(self, run_script, email_task):
-        from agents.models import AgentCustomField, AgentHistory, Note
         from clients.models import ClientCustomField, SiteCustomField
 
+        from .models import AgentCustomField, AgentHistory, Note
+
         run_script.return_value = "ok"
         url = f"/agents/{self.agent.agent_id}/runscript/"
         script = baker.make_recipe("scripts.script")
@@ -539,8 +590,6 @@ class TestAgentViews(TacticalTestCase):
             "output": "wait",
             "args": [],
             "timeout": 15,
-            "run_as_user": False,
-            "env_vars": ["hello=world", "foo=bar"],
         }
 
         r = self.client.post(url, data, format="json")
@@ -550,13 +599,7 @@ class TestAgentViews(TacticalTestCase):
             raise AgentHistory.DoesNotExist
 
         run_script.assert_called_with(
-            scriptpk=script.pk,
-            args=[],
-            timeout=18,
-            wait=True,
-            history_pk=hist.pk,
-            run_as_user=False,
-            env_vars=["hello=world", "foo=bar"],
+            scriptpk=script.pk, args=[], timeout=18, wait=True, history_pk=hist.pk
         )
         run_script.reset_mock()
 
@@ -568,21 +611,15 @@ class TestAgentViews(TacticalTestCase):
             "timeout": 15,
             "emailMode": "default",
             "emails": ["admin@example.com", "bob@example.com"],
-            "run_as_user": False,
-            "env_vars": ["hello=world", "foo=bar"],
         }
         r = self.client.post(url, data, format="json")
         self.assertEqual(r.status_code, 200)
-        hist = AgentHistory.objects.filter(agent=self.agent, script=script).last()
         email_task.assert_called_with(
             agentpk=self.agent.pk,
             scriptpk=script.pk,
             nats_timeout=18,
             emails=[],
             args=["abc", "123"],
-            history_pk=hist.pk,
-            run_as_user=False,
-            env_vars=["hello=world", "foo=bar"],
         )
         email_task.reset_mock()
 
@@ -590,16 +627,12 @@ class TestAgentViews(TacticalTestCase):
         data["emailMode"] = "custom"
         r = self.client.post(url, data, format="json")
         self.assertEqual(r.status_code, 200)
-        hist = AgentHistory.objects.filter(agent=self.agent, script=script).last()
         email_task.assert_called_with(
             agentpk=self.agent.pk,
             scriptpk=script.pk,
             nats_timeout=18,
             emails=["admin@example.com", "bob@example.com"],
             args=["abc", "123"],
-            history_pk=hist.pk,
-            run_as_user=False,
-            env_vars=["hello=world", "foo=bar"],
         )
 
         # test fire and forget
@@ -608,8 +641,6 @@ class TestAgentViews(TacticalTestCase):
             "output": "forget",
             "args": ["hello", "world"],
             "timeout": 22,
-            "run_as_user": True,
-            "env_vars": ["hello=world", "foo=bar"],
         }
 
         r = self.client.post(url, data, format="json")
@@ -619,19 +650,14 @@ class TestAgentViews(TacticalTestCase):
             raise AgentHistory.DoesNotExist
 
         run_script.assert_called_with(
-            scriptpk=script.pk,
-            args=["hello", "world"],
-            timeout=25,
-            history_pk=hist.pk,
-            run_as_user=True,
-            env_vars=["hello=world", "foo=bar"],
+            scriptpk=script.pk, args=["hello", "world"], timeout=25, history_pk=hist.pk
        )
         run_script.reset_mock()
 
         # test collector
 
         # save to agent custom field
-        custom_field = baker.make("core.CustomField", model=CustomFieldModel.AGENT)
+        custom_field = baker.make("core.CustomField", model="agent")
         data = {
             "script": script.pk,
             "output": "collector",
@@ -639,8 +665,6 @@ class TestAgentViews(TacticalTestCase):
             "timeout": 22,
             "custom_field": custom_field.pk,
             "save_all_output": True,
-            "run_as_user": False,
-            "env_vars": ["hello=world", "foo=bar"],
         }
 
         r = self.client.post(url, data, format="json")
@@ -655,8 +679,6 @@ class TestAgentViews(TacticalTestCase):
             timeout=25,
             wait=True,
             history_pk=hist.pk,
-            run_as_user=False,
-            env_vars=["hello=world", "foo=bar"],
         )
         run_script.reset_mock()
 
@@ -674,8 +696,6 @@ class TestAgentViews(TacticalTestCase):
             "timeout": 22,
             "custom_field": custom_field.pk,
             "save_all_output": False,
-            "run_as_user": False,
-            "env_vars": ["hello=world", "foo=bar"],
         }
 
         r = self.client.post(url, data, format="json")
@@ -690,8 +710,6 @@ class TestAgentViews(TacticalTestCase):
             timeout=25,
             wait=True,
             history_pk=hist.pk,
-            run_as_user=False,
-            env_vars=["hello=world", "foo=bar"],
         )
         run_script.reset_mock()
 
@@ -703,7 +721,7 @@ class TestAgentViews(TacticalTestCase):
         )
 
         # save to client custom field
-        custom_field = baker.make("core.CustomField", model=CustomFieldModel.CLIENT)
+        custom_field = baker.make("core.CustomField", model="client")
         data = {
             "script": script.pk,
             "output": "collector",
@@ -711,8 +729,6 @@ class TestAgentViews(TacticalTestCase):
             "timeout": 22,
             "custom_field": custom_field.pk,
             "save_all_output": False,
-            "run_as_user": False,
-            "env_vars": ["hello=world", "foo=bar"],
         }
 
         r = self.client.post(url, data, format="json")
@@ -727,8 +743,6 @@ class TestAgentViews(TacticalTestCase):
             timeout=25,
             wait=True,
             history_pk=hist.pk,
-            run_as_user=False,
-            env_vars=["hello=world", "foo=bar"],
        )
         run_script.reset_mock()
 
@@ -745,8 +759,6 @@ class TestAgentViews(TacticalTestCase):
             "output": "note",
             "args": ["hello", "world"],
             "timeout": 22,
-            "run_as_user": False,
-            "env_vars": ["hello=world", "foo=bar"],
         }
 
         r = self.client.post(url, data, format="json")
@@ -761,74 +773,11 @@ class TestAgentViews(TacticalTestCase):
             timeout=25,
             wait=True,
             history_pk=hist.pk,
-            run_as_user=False,
-            env_vars=["hello=world", "foo=bar"],
         )
         run_script.reset_mock()
 
         self.assertEqual(Note.objects.get(agent=self.agent).note, "ok")
 
-        # test run on server
-        with patch("core.utils.run_server_script") as mock_run_server_script:
-            mock_run_server_script.return_value = ("output", "error", 1.23456789, 0)
-            data = {
-                "script": script.pk,
-                "output": "wait",
-                "args": ["arg1", "arg2"],
-                "timeout": 15,
-                "run_as_user": False,
-                "env_vars": ["key1=val1", "key2=val2"],
-                "run_on_server": True,
-            }
-
-            r = self.client.post(url, data, format="json")
-            self.assertEqual(r.status_code, 200)
-            hist = AgentHistory.objects.filter(agent=self.agent, script=script).last()
-            if not hist:
-                raise AgentHistory.DoesNotExist
-
-            mock_run_server_script.assert_called_with(
-                body=script.script_body,
-                args=script.parse_script_args(self.agent, script.shell, data["args"]),
-                env_vars=script.parse_script_env_vars(
-                    self.agent, script.shell, data["env_vars"]
-                ),
-                shell=script.shell,
-                timeout=18,
-            )
-
-            expected_ret = {
-                "stdout": "output",
-                "stderr": "error",
-                "execution_time": "1.2346",
-                "retcode": 0,
-            }
-
-            self.assertEqual(r.data, expected_ret)
-
-            hist.refresh_from_db()
-            expected_script_results = {**expected_ret, "id": hist.pk}
-            self.assertEqual(hist.script_results, expected_script_results)
-
-        # test run on server with server scripts disabled
-        with patch(
-            "core.models.CoreSettings.server_scripts_enabled",
-            new_callable=PropertyMock,
-        ) as server_scripts_enabled:
-            server_scripts_enabled.return_value = False
-
-            data = {
-                "script": script.pk,
-                "output": "wait",
-                "args": ["arg1", "arg2"],
-                "timeout": 15,
-                "run_as_user": False,
-                "env_vars": ["key1=val1", "key2=val2"],
-                "run_on_server": True,
-            }
-            r = self.client.post(url, data, format="json")
-            self.assertEqual(r.status_code, 400)
-
     def test_get_notes(self):
         url = f"{base_url}/notes/"
 
@@ -916,6 +865,7 @@ class TestAgentViews(TacticalTestCase):
         self.check_not_authenticated("delete", url)
 
     def test_get_agent_history(self):
+
         # setup data
         agent = baker.make_recipe("agents.agent")
         history = baker.make("agents.AgentHistory", agent=agent, _quantity=30)
@@ -927,12 +877,17 @@ class TestAgentViews(TacticalTestCase):
 
         # test pulling data
         r = self.client.get(url, format="json")
-        ctx = {"default_tz": ZoneInfo("America/Los_Angeles")}
+        ctx = {"default_tz": pytz.timezone("America/Los_Angeles")}
         data = AgentHistorySerializer(history, many=True, context=ctx).data
         self.assertEqual(r.status_code, 200)
         self.assertEqual(r.data, data)  # type:ignore
 
 
+@modify_settings(
+    MIDDLEWARE={
+        "remove": "tacticalrmm.middleware.LinuxMiddleware",
+    }
+)
 class TestAgentViewsNew(TacticalTestCase):
     def setUp(self):
         self.authenticate()
@@ -967,6 +922,11 @@ class TestAgentViewsNew(TacticalTestCase):
         self.check_not_authenticated("post", url)
 
 
+@modify_settings(
+    MIDDLEWARE={
+        "remove": "tacticalrmm.middleware.LinuxMiddleware",
+    }
+)
 class TestAgentPermissions(TacticalTestCase):
     def setUp(self):
         self.setup_client()
@@ -1071,6 +1031,7 @@ class TestAgentPermissions(TacticalTestCase):
     @patch("time.sleep")
     @patch("agents.models.Agent.nats_cmd", return_value="ok")
     def test_agent_actions_permissions(self, nats_cmd, sleep):
+
         agent = baker.make_recipe("agents.agent")
         unauthorized_agent = baker.make_recipe("agents.agent")
 
@@ -1081,6 +1042,7 @@ class TestAgentPermissions(TacticalTestCase):
             {"method": "post", "action": "recover", "role": "can_recover_agents"},
             {"method": "post", "action": "reboot", "role": "can_reboot_agents"},
             {"method": "patch", "action": "reboot", "role": "can_reboot_agents"},
+            {"method": "get", "action": "ping", "role": "can_ping_agents"},
             {"method": "get", "action": "meshcentral", "role": "can_use_mesh"},
             {"method": "post", "action": "meshcentral/recover", "role": "can_use_mesh"},
             {"method": "get", "action": "processes", "role": "can_manage_procs"},
@@ -1160,6 +1122,55 @@ class TestAgentPermissions(TacticalTestCase):
         self.check_authorized("post", url, site_data)
         self.check_authorized("post", url, client_data)
 
+    @patch("agents.tasks.send_agent_update_task.delay")
+    def test_agent_update_permissions(self, update_task):
+        agents = baker.make_recipe("agents.agent", _quantity=5)
+        other_agents = baker.make_recipe("agents.agent", _quantity=7)
+
+        url = f"{base_url}/update/"
+
+        data = {
+            "agent_ids": [agent.agent_id for agent in agents]
+            + [agent.agent_id for agent in other_agents]
+        }
+
+        # test superuser access
+        self.check_authorized_superuser("post", url, data)
+        update_task.assert_called_with(agent_ids=data["agent_ids"])
+        update_task.reset_mock()
+
+        user = self.create_user_with_roles([])
+        self.client.force_authenticate(user=user)
+
+        self.check_not_authorized("post", url, data)
+        update_task.assert_not_called()
+
+        user.role.can_update_agents = True
+        user.role.save()
+
+        self.check_authorized("post", url, data)
+        update_task.assert_called_with(agent_ids=data["agent_ids"])
+        update_task.reset_mock()
+
+        # limit to client
+        # user.role.can_view_clients.set([agents[0].client])
+        # self.check_authorized("post", url, data)
+        # update_task.assert_called_with(agent_ids=[agent.agent_id for agent in agents])
+        # update_task.reset_mock()
+
+        # add site
+        # user.role.can_view_sites.set([other_agents[0].site])
+        # self.check_authorized("post", url, data)
+        # update_task.assert_called_with(agent_ids=data["agent_ids"])
+        # update_task.reset_mock()
+
+        # remove client permissions
+        # user.role.can_view_clients.clear()
+        # self.check_authorized("post", url, data)
+        # update_task.assert_called_with(
+        #     agent_ids=[agent.agent_id for agent in other_agents]
+        # )
+
     def test_get_agent_version_permissions(self):
         agents = baker.make_recipe("agents.agent", _quantity=5)
         other_agents = baker.make_recipe("agents.agent", _quantity=7)
@@ -1197,6 +1208,7 @@ class TestAgentPermissions(TacticalTestCase):
         self.assertEqual(len(response.data["agents"]), 7)
 
     def test_generating_agent_installer_permissions(self):
+
         client = baker.make("clients.Client")
         client_site = baker.make("clients.Site", client=client)
         site = baker.make("clients.Site")
@@ -1259,6 +1271,7 @@ class TestAgentPermissions(TacticalTestCase):
         self.check_not_authorized("post", url, data)
 
     def test_agent_notes_permissions(self):
+
         agent = baker.make_recipe("agents.agent")
         notes = baker.make("agents.Note", agent=agent, _quantity=5)
 
@@ -1347,9 +1360,9 @@ class TestAgentPermissions(TacticalTestCase):
 
         sites = baker.make("clients.Site", _quantity=2)
         agent = baker.make_recipe("agents.agent", site=sites[0])
-        history = baker.make("agents.AgentHistory", agent=agent, _quantity=5)  # noqa
+        history = baker.make("agents.AgentHistory", agent=agent, _quantity=5)
         unauthorized_agent = baker.make_recipe("agents.agent", site=sites[1])
-        unauthorized_history = baker.make(  # noqa
+        unauthorized_history = baker.make(
             "agents.AgentHistory", agent=unauthorized_agent, _quantity=6
         )
 
@@ -1387,13 +1400,154 @@ class TestAgentPermissions(TacticalTestCase):
         self.check_authorized_superuser("get", unauthorized_url)
 
 
+@modify_settings(
+    MIDDLEWARE={
+        "remove": "tacticalrmm.middleware.LinuxMiddleware",
+    }
+)
 class TestAgentTasks(TacticalTestCase):
     def setUp(self):
         self.authenticate()
         self.setup_coresettings()
 
+    @patch("agents.utils.get_agent_url")
+    @patch("agents.models.Agent.nats_cmd")
+    def test_agent_update(self, nats_cmd, get_url):
+        get_url.return_value = "https://exe.tacticalrmm.io"
+
+        from agents.tasks import agent_update
+
+        agent_noarch = baker.make_recipe(
+            "agents.agent",
+            operating_system="Error getting OS",
+            version=settings.LATEST_AGENT_VER,
+        )
+        r = agent_update(agent_noarch.agent_id)
+        self.assertEqual(r, "noarch")
+
+        agent_130 = baker.make_recipe(
+            "agents.agent",
+            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
+            version="1.3.0",
+        )
+        r = agent_update(agent_130.agent_id)
+        self.assertEqual(r, "not supported")
+
+        # test __without__ code signing
+        agent64_nosign = baker.make_recipe(
+            "agents.agent",
+            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
+            version="1.4.14",
+        )
+
+        r = agent_update(agent64_nosign.agent_id)
+        self.assertEqual(r, "created")
+        action = PendingAction.objects.get(agent__agent_id=agent64_nosign.agent_id)
+        self.assertEqual(action.action_type, "agentupdate")
+        self.assertEqual(action.status, "pending")
+        self.assertEqual(
+            action.details["url"],
+            f"https://github.com/amidaware/rmmagent/releases/download/v{settings.LATEST_AGENT_VER}/winagent-v{settings.LATEST_AGENT_VER}.exe",
+        )
+        self.assertEqual(
+            action.details["inno"], f"winagent-v{settings.LATEST_AGENT_VER}.exe"
+        )
+        self.assertEqual(action.details["version"], settings.LATEST_AGENT_VER)
+        nats_cmd.assert_called_with(
+            {
+                "func": "agentupdate",
+                "payload": {
+                    "url": f"https://github.com/amidaware/rmmagent/releases/download/v{settings.LATEST_AGENT_VER}/winagent-v{settings.LATEST_AGENT_VER}.exe",
+                    "version": settings.LATEST_AGENT_VER,
+                    "inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
+                },
+            },
+            wait=False,
+        )
+
+        # test __with__ code signing (64 bit)
+        """ codesign = baker.make("core.CodeSignToken", token="testtoken123")
+        agent64_sign = baker.make_recipe(
+            "agents.agent",
+            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
+            version="1.4.14",
+        )
+
+        nats_cmd.return_value = "ok"
+        get_exe.return_value = "https://exe.tacticalrmm.io"
+        r = agent_update(agent64_sign.pk, codesign.token)
+        self.assertEqual(r, "created")
+        nats_cmd.assert_called_with(
+            {
+                "func": "agentupdate",
+                "payload": {
+                    "url": f"https://exe.tacticalrmm.io/api/v1/winagents/?version={settings.LATEST_AGENT_VER}&arch=64&token=testtoken123",
+                    "version": settings.LATEST_AGENT_VER,
+                    "inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
+                },
+            },
+            wait=False,
+        )
+        action = PendingAction.objects.get(agent__pk=agent64_sign.pk)
+        self.assertEqual(action.action_type, "agentupdate")
+        self.assertEqual(action.status, "pending")
+
+        # test __with__ code signing (32 bit)
+        agent32_sign = baker.make_recipe(
+            "agents.agent",
+            operating_system="Windows 10 Pro, 32 bit (build 19041.450)",
+            version="1.4.14",
+        )
+
+        nats_cmd.return_value = "ok"
+        get_exe.return_value = "https://exe.tacticalrmm.io"
+        r = agent_update(agent32_sign.pk, codesign.token)
+        self.assertEqual(r, "created")
+        nats_cmd.assert_called_with(
+            {
+                "func": "agentupdate",
+                "payload": {
+                    "url": f"https://exe.tacticalrmm.io/api/v1/winagents/?version={settings.LATEST_AGENT_VER}&arch=32&token=testtoken123",
+                    "version": settings.LATEST_AGENT_VER,
+                    "inno": f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe",
+                },
+            },
+            wait=False,
+        )
+        action = PendingAction.objects.get(agent__pk=agent32_sign.pk)
+        self.assertEqual(action.action_type, "agentupdate")
+        self.assertEqual(action.status, "pending") """
+
+    @patch("agents.tasks.agent_update")
+    @patch("agents.tasks.sleep", return_value=None)
+    def test_auto_self_agent_update_task(self, mock_sleep, agent_update):
+        baker.make_recipe(
+            "agents.agent",
+            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
+            version=settings.LATEST_AGENT_VER,
+            _quantity=23,
+        )
+        baker.make_recipe(
+            "agents.agent",
+            operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
+            version="1.3.0",
+            _quantity=33,
+        )
+
+        self.coresettings.agent_auto_update = False
+        self.coresettings.save(update_fields=["agent_auto_update"])
+
+        r = auto_self_agent_update_task.s().apply()
+        self.assertEqual(agent_update.call_count, 0)
+
+        self.coresettings.agent_auto_update = True
+        self.coresettings.save(update_fields=["agent_auto_update"])
+
+        r = auto_self_agent_update_task.s().apply()
+        self.assertEqual(agent_update.call_count, 33)
+
     def test_agent_history_prune_task(self):
-        from agents.tasks import prune_agent_history
+        from .tasks import prune_agent_history
 
         # setup data
         agent = baker.make_recipe("agents.agent")
@@ -1,106 +0,0 @@
-from unittest.mock import patch
-
-from rest_framework.response import Response
-
-from tacticalrmm.test import TacticalTestCase
-
-
-class TestAgentInstalls(TacticalTestCase):
-    def setUp(self) -> None:
-        self.authenticate()
-        self.setup_coresettings()
-        self.setup_base_instance()
-
-    @patch("agents.utils.generate_linux_install")
-    @patch("knox.models.AuthToken.objects.create")
-    @patch("tacticalrmm.utils.generate_winagent_exe")
-    @patch("core.utils.token_is_valid")
-    @patch("agents.utils.get_agent_url")
-    def test_install_agent(
-        self,
-        mock_agent_url,
-        mock_token_valid,
-        mock_gen_win_exe,
-        mock_auth,
-        mock_linux_install,
-    ):
-        mock_agent_url.return_value = "https://example.com"
-        mock_token_valid.return_value = "", False
-        mock_gen_win_exe.return_value = Response("ok")
-        mock_auth.return_value = "", "token"
-        mock_linux_install.return_value = Response("ok")
-
-        url = "/agents/installer/"
-
-        # test windows dynamic exe
-        data = {
-            "installMethod": "exe",
-            "client": self.site2.client.pk,
-            "site": self.site2.pk,
-            "expires": 24,
-            "agenttype": "server",
-            "power": 0,
-            "rdp": 1,
-            "ping": 0,
-            "goarch": "amd64",
-            "api": "https://api.example.com",
-            "fileName": "rmm-client-site-server.exe",
-            "plat": "windows",
-        }
-
-        r = self.client.post(url, data, format="json")
-        self.assertEqual(r.status_code, 200)
-
-        mock_gen_win_exe.assert_called_with(
-            client=self.site2.client.pk,
-            site=self.site2.pk,
-            agent_type="server",
-            rdp=1,
-            ping=0,
-            power=0,
-            goarch="amd64",
-            token="token",
-            api="https://api.example.com",
-            file_name="rmm-client-site-server.exe",
-        )
-
-        # test linux no code sign
-        data["plat"] = "linux"
-        data["installMethod"] = "bash"
-        data["rdp"] = 0
-        data["agenttype"] = "workstation"
-        r = self.client.post(url, data, format="json")
-        self.assertEqual(r.status_code, 400)
-
-        # test linux
-        mock_token_valid.return_value = "token123", True
-        r = self.client.post(url, data, format="json")
-        self.assertEqual(r.status_code, 200)
-        mock_linux_install.assert_called_with(
-            client=str(self.site2.client.pk),
-            site=str(self.site2.pk),
-            agent_type="workstation",
-            arch="amd64",
-            token="token",
-            api="https://api.example.com",
-            download_url="https://example.com",
-        )
-
-        # test manual
-        data["rdp"] = 1
-        data["installMethod"] = "manual"
-        r = self.client.post(url, data, format="json")
-        self.assertIn("rdp", r.json()["cmd"])
-        self.assertNotIn("power", r.json()["cmd"])
-
-        data.update({"ping": 1, "power": 1})
-        r = self.client.post(url, data, format="json")
-        self.assertIn("power", r.json()["cmd"])
-        self.assertIn("ping", r.json()["cmd"])
-
-        # test powershell
-        data["installMethod"] = "powershell"
-        r = self.client.post(url, data, format="json")
-        self.assertEqual(r.status_code, 200)
-
-        self.check_not_authenticated("post", url)
@@ -1,61 +0,0 @@
-from unittest.mock import patch
-
-from model_bakery import baker
-
-from agents.models import Agent
-from tacticalrmm.constants import AgentMonType
-from tacticalrmm.test import TacticalTestCase
-
-
-class AgentSaveTestCase(TacticalTestCase):
-    def setUp(self):
-        self.client1 = baker.make("clients.Client")
-        self.client2 = baker.make("clients.Client")
-        self.site1 = baker.make("clients.Site", client=self.client1)
-        self.site2 = baker.make("clients.Site", client=self.client2)
-        self.site3 = baker.make("clients.Site", client=self.client2)
-        self.agent = baker.make(
-            "agents.Agent",
-            site=self.site1,
-            monitoring_type=AgentMonType.SERVER,
-        )
-
-    @patch.object(Agent, "set_alert_template")
-    def test_set_alert_template_called_on_mon_type_change(
-        self, mock_set_alert_template
-    ):
-        self.agent.monitoring_type = AgentMonType.WORKSTATION
-        self.agent.save()
-        mock_set_alert_template.assert_called_once()
-
-    @patch.object(Agent, "set_alert_template")
-    def test_set_alert_template_called_on_site_change(self, mock_set_alert_template):
-        self.agent.site = self.site2
-        self.agent.save()
-        mock_set_alert_template.assert_called_once()
-
-    @patch.object(Agent, "set_alert_template")
-    def test_set_alert_template_called_on_site_and_montype_change(
-        self, mock_set_alert_template
-    ):
-        print(f"before: {self.agent.monitoring_type} site: {self.agent.site_id}")
-        self.agent.site = self.site3
-        self.agent.monitoring_type = AgentMonType.WORKSTATION
-        self.agent.save()
-        mock_set_alert_template.assert_called_once()
-        print(f"after: {self.agent.monitoring_type} site: {self.agent.site_id}")
-
-    @patch.object(Agent, "set_alert_template")
-    def test_set_alert_template_not_called_without_changes(
-        self, mock_set_alert_template
-    ):
-        self.agent.save()
-        mock_set_alert_template.assert_not_called()
-
-    @patch.object(Agent, "set_alert_template")
-    def test_set_alert_template_not_called_on_non_relevant_field_change(
-        self, mock_set_alert_template
-    ):
-        self.agent.hostname = "abc123"
-        self.agent.save()
-        mock_set_alert_template.assert_not_called()
@@ -1,313 +0,0 @@
-from unittest.mock import patch
-
-from django.conf import settings
-from django.core.management import call_command
-from model_bakery import baker
-from packaging import version as pyver
-
-from agents.models import Agent
-from agents.tasks import auto_self_agent_update_task, send_agent_update_task
-from logs.models import PendingAction
-from tacticalrmm.constants import (
-    AGENT_DEFER,
-    AgentMonType,
-    AgentPlat,
-    GoArch,
-    PAAction,
-    PAStatus,
-)
-from tacticalrmm.test import TacticalTestCase
-
-
-class TestAgentUpdate(TacticalTestCase):
-    def setUp(self) -> None:
-        self.authenticate()
-        self.setup_coresettings()
-        self.setup_base_instance()
-
-    @patch("agents.management.commands.update_agents.send_agent_update_task.delay")
-    @patch("agents.management.commands.update_agents.token_is_valid")
-    @patch("agents.management.commands.update_agents.get_core_settings")
-    def test_update_agents_mgmt_command(self, mock_core, mock_token, mock_update):
-        mock_token.return_value = ("token123", True)
-
-        baker.make_recipe(
-            "agents.online_agent",
-            site=self.site1,
-            monitoring_type=AgentMonType.SERVER,
-            plat=AgentPlat.WINDOWS,
-            version="2.0.3",
-            _quantity=6,
-        )
-
-        baker.make_recipe(
-            "agents.online_agent",
-            site=self.site3,
-            monitoring_type=AgentMonType.WORKSTATION,
-            plat=AgentPlat.LINUX,
-            version="2.0.3",
-            _quantity=5,
-        )
-
-        baker.make_recipe(
-            "agents.online_agent",
-            site=self.site2,
-            monitoring_type=AgentMonType.SERVER,
-            plat=AgentPlat.WINDOWS,
-            version=settings.LATEST_AGENT_VER,
-            _quantity=8,
-        )
-
-        mock_core.return_value.agent_auto_update = False
-        call_command("update_agents")
-        mock_update.assert_not_called()
-
-        mock_core.return_value.agent_auto_update = True
-        call_command("update_agents")
-
-        ids = list(
-            Agent.objects.defer(*AGENT_DEFER)
-            .exclude(version=settings.LATEST_AGENT_VER)
-            .values_list("agent_id", flat=True)
-        )
-
-        mock_update.assert_called_with(agent_ids=ids, token="token123", force=False)
-
-    @patch("agents.models.Agent.nats_cmd")
-    @patch("agents.models.get_agent_url")
-    def test_do_update(self, mock_agent_url, mock_nats_cmd):
-        mock_agent_url.return_value = "https://example.com/123"
-
-        # test noarch
-        agent_noarch = baker.make_recipe(
-            "agents.online_agent",
-            site=self.site1,
-            monitoring_type=AgentMonType.SERVER,
-            plat=AgentPlat.WINDOWS,
-            version="2.1.1",
-        )
-        r = agent_noarch.do_update(token="", force=True)
-        self.assertEqual(r, "noarch")
-
-        # test too old
-        agent_old = baker.make_recipe(
-            "agents.online_agent",
-            site=self.site2,
-            monitoring_type=AgentMonType.SERVER,
-            plat=AgentPlat.WINDOWS,
-            version="1.3.0",
-            goarch=GoArch.AMD64,
-        )
-        r = agent_old.do_update(token="", force=True)
-        self.assertEqual(r, "not supported")
-
-        win = baker.make_recipe(
-            "agents.online_agent",
-            site=self.site1,
-            monitoring_type=AgentMonType.SERVER,
-            plat=AgentPlat.WINDOWS,
-            version="2.1.1",
-            goarch=GoArch.AMD64,
-        )
-
-        lin = baker.make_recipe(
-            "agents.online_agent",
-            site=self.site3,
-            monitoring_type=AgentMonType.WORKSTATION,
-            plat=AgentPlat.LINUX,
-            version="2.1.1",
-            goarch=GoArch.ARM32,
-        )
-
-        # test windows agent update
-        r = win.do_update(token="", force=False)
-        self.assertEqual(r, "created")
-        mock_nats_cmd.assert_called_with(
-            {
-                "func": "agentupdate",
-                "payload": {
-                    "url": "https://example.com/123",
-                    "version": settings.LATEST_AGENT_VER,
-                    "inno": f"tacticalagent-v{settings.LATEST_AGENT_VER}-windows-amd64.exe",
-                },
-            },
-            wait=False,
-        )
-        action1 = PendingAction.objects.get(agent__agent_id=win.agent_id)
-        self.assertEqual(action1.action_type, PAAction.AGENT_UPDATE)
-        self.assertEqual(action1.status, PAStatus.PENDING)
-        self.assertEqual(action1.details["url"], "https://example.com/123")
-        self.assertEqual(
-            action1.details["inno"],
-            f"tacticalagent-v{settings.LATEST_AGENT_VER}-windows-amd64.exe",
-        )
-        self.assertEqual(action1.details["version"], settings.LATEST_AGENT_VER)
-
-        mock_nats_cmd.reset_mock()
-
-        # test linux agent update
-        r = lin.do_update(token="", force=False)
-        mock_nats_cmd.assert_called_with(
-            {
-                "func": "agentupdate",
-                "payload": {
-                    "url": "https://example.com/123",
-                    "version": settings.LATEST_AGENT_VER,
-                    "inno": f"tacticalagent-v{settings.LATEST_AGENT_VER}-linux-arm.exe",
-                },
-            },
-            wait=False,
-        )
-        action2 = PendingAction.objects.get(agent__agent_id=lin.agent_id)
-        self.assertEqual(action2.action_type, PAAction.AGENT_UPDATE)
-        self.assertEqual(action2.status, PAStatus.PENDING)
-        self.assertEqual(action2.details["url"], "https://example.com/123")
-        self.assertEqual(
-            action2.details["inno"],
-            f"tacticalagent-v{settings.LATEST_AGENT_VER}-linux-arm.exe",
-        )
-        self.assertEqual(action2.details["version"], settings.LATEST_AGENT_VER)
-
-        # check if old agent update pending actions are being deleted
-        # should only be 1 pending action at all times
-        pa_count = win.pendingactions.filter(
-            action_type=PAAction.AGENT_UPDATE, status=PAStatus.PENDING
-        ).count()
-        self.assertEqual(pa_count, 1)
-
-        for _ in range(4):
-            win.do_update(token="", force=False)
-
-        pa_count = win.pendingactions.filter(
-            action_type=PAAction.AGENT_UPDATE, status=PAStatus.PENDING
-        ).count()
-        self.assertEqual(pa_count, 1)
-
-    def test_auto_self_agent_update_task(self):
-        auto_self_agent_update_task()
-
-    @patch("agents.models.Agent.do_update")
-    def test_send_agent_update_task(self, mock_update):
-        baker.make_recipe(
-            "agents.online_agent",
-            site=self.site2,
-            monitoring_type=AgentMonType.SERVER,
-            plat=AgentPlat.WINDOWS,
-            version="2.1.1",
-            goarch=GoArch.AMD64,
-            _quantity=6,
-        )
-        ids = list(
-            Agent.objects.defer(*AGENT_DEFER)
-            .exclude(version=settings.LATEST_AGENT_VER)
-            .values_list("agent_id", flat=True)
-        )
-        send_agent_update_task(agent_ids=ids, token="", force=False)
-        self.assertEqual(mock_update.call_count, 6)
-
-    @patch("agents.views.token_is_valid")
-    @patch("agents.tasks.send_agent_update_task.delay")
-    def test_update_agents(self, mock_update, mock_token):
-        mock_token.return_value = ("", False)
-        url = "/agents/update/"
-        baker.make_recipe(
-            "agents.online_agent",
-            site=self.site2,
-            monitoring_type=AgentMonType.SERVER,
-            plat=AgentPlat.WINDOWS,
-            version="2.1.1",
-            goarch=GoArch.AMD64,
-            _quantity=7,
-        )
-        baker.make_recipe(
-            "agents.online_agent",
-            site=self.site2,
-            monitoring_type=AgentMonType.SERVER,
-            plat=AgentPlat.WINDOWS,
-            version=settings.LATEST_AGENT_VER,
-            goarch=GoArch.AMD64,
-            _quantity=3,
-        )
-        baker.make_recipe(
-            "agents.online_agent",
-            site=self.site2,
-            monitoring_type=AgentMonType.WORKSTATION,
-            plat=AgentPlat.LINUX,
-            version="2.0.1",
-            goarch=GoArch.ARM32,
-            _quantity=9,
-        )
-
-        agent_ids: list[str] = list(
-            Agent.objects.only("agent_id").values_list("agent_id", flat=True)
-        )
-
-        data = {"agent_ids": agent_ids}
-        expected: list[str] = [
-            i.agent_id
-            for i in Agent.objects.only("agent_id", "version")
-            if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
-        ]
-
-        r = self.client.post(url, data, format="json")
-        self.assertEqual(r.status_code, 200)
-
-        mock_update.assert_called_with(agent_ids=expected, token="", force=False)
-
-        self.check_not_authenticated("post", url)
-
-    @patch("agents.views.token_is_valid")
-    @patch("agents.tasks.send_agent_update_task.delay")
-    def test_agent_update_permissions(self, update_task, mock_token):
-        mock_token.return_value = ("", False)
-
-        agents = baker.make_recipe("agents.agent", _quantity=5)
-        other_agents = baker.make_recipe("agents.agent", _quantity=7)
-
-        url = "/agents/update/"
-
-        data = {
-            "agent_ids": [agent.agent_id for agent in agents]
-            + [agent.agent_id for agent in other_agents]
-        }
-
-        # test superuser access
-        self.check_authorized_superuser("post", url, data)
-        update_task.assert_called_with(
-            agent_ids=data["agent_ids"], token="", force=False
-        )
-        update_task.reset_mock()
-
-        user = self.create_user_with_roles([])
-        self.client.force_authenticate(user=user)
-
-        self.check_not_authorized("post", url, data)
-        update_task.assert_not_called()
-
-        user.role.can_update_agents = True
-        user.role.save()
-
-        self.check_authorized("post", url, data)
-        update_task.assert_called_with(
-            agent_ids=data["agent_ids"], token="", force=False
-        )
-        update_task.reset_mock()
-
-        # limit to client
-        # user.role.can_view_clients.set([agents[0].client])
-        # self.check_authorized("post", url, data)
-        # update_task.assert_called_with(agent_ids=[agent.agent_id for agent in agents])
-        # update_task.reset_mock()
-
-        # add site
-        # user.role.can_view_sites.set([other_agents[0].site])
-        # self.check_authorized("post", url, data)
-        # update_task.assert_called_with(agent_ids=data["agent_ids"])
-        # update_task.reset_mock()
-
-        # remove client permissions
-        # user.role.can_view_clients.clear()
-        # self.check_authorized("post", url, data)
-        # update_task.assert_called_with(
-        #     agent_ids=[agent.agent_id for agent in other_agents]
-        # )
@@ -1,59 +0,0 @@
-from unittest.mock import patch
-
-from django.conf import settings
-
-from agents.utils import generate_linux_install, get_agent_url
-from tacticalrmm.test import TacticalTestCase
-
-
-class TestAgentUtils(TacticalTestCase):
-    def setUp(self) -> None:
-        self.authenticate()
-        self.setup_coresettings()
-        self.setup_base_instance()
-
-    def test_get_agent_url(self):
-        ver = settings.LATEST_AGENT_VER
-
-        # test without token
-        r = get_agent_url(goarch="amd64", plat="windows", token="")
-        expected = f"https://github.com/amidaware/rmmagent/releases/download/v{ver}/tacticalagent-v{ver}-windows-amd64.exe"
-        self.assertEqual(r, expected)
-
-        # test with token
-        r = get_agent_url(goarch="386", plat="linux", token="token123")
-        expected = f"https://{settings.AGENTS_URL}version={ver}&arch=386&token=token123&plat=linux&api=api.example.com"
-
-    @patch("agents.utils.get_mesh_device_id")
-    @patch("agents.utils.asyncio.run")
-    @patch("agents.utils.get_mesh_ws_url")
-    @patch("agents.utils.get_core_settings")
-    def test_generate_linux_install(
-        self, mock_core, mock_mesh, mock_async_run, mock_mesh_device_id
-    ):
-        mock_mesh_device_id.return_value = "meshdeviceid"
-        mock_core.return_value.mesh_site = "meshsite"
-        mock_async_run.return_value = "meshid"
-        mock_mesh.return_value = "meshws"
-        r = generate_linux_install(
-            client="1",
-            site="1",
-            agent_type="server",
-            arch="amd64",
-            token="token123",
-            api="api.example.com",
-            download_url="asdasd3423",
-        )
-
-        ret = r.getvalue().decode("utf-8")
-
-        self.assertIn(r"agentDL='asdasd3423'", ret)
-        self.assertIn(
-            r"meshDL='meshsite/meshagents?id=meshid&installflags=2&meshinstall=6'", ret
-        )
-        self.assertIn(r"apiURL='api.example.com'", ret)
-        self.assertIn(r"agentDL='asdasd3423'", ret)
-        self.assertIn(r"token='token123'", ret)
-        self.assertIn(r"clientID='1'", ret)
-        self.assertIn(r"siteID='1'", ret)
-        self.assertIn(r"agentType='server'", ret)
@@ -1,46 +0,0 @@
-from unittest.mock import call, patch
-
-from django.core.management import call_command
-from model_bakery import baker
-
-from tacticalrmm.constants import AgentMonType, AgentPlat
-from tacticalrmm.test import TacticalTestCase
-
-
-class TestBulkRestartAgents(TacticalTestCase):
-    def setUp(self) -> None:
-        self.authenticate()
-        self.setup_coresettings()
-        self.setup_base_instance()
-
-    @patch("core.management.commands.bulk_restart_agents.sleep")
-    @patch("agents.models.Agent.recover")
-    @patch("core.management.commands.bulk_restart_agents.get_mesh_ws_url")
-    def test_bulk_restart_agents_mgmt_cmd(
-        self, get_mesh_ws_url, recover, mock_sleep
-    ) -> None:
-        get_mesh_ws_url.return_value = "https://mesh.example.com/test"
-
-        baker.make_recipe(
-            "agents.online_agent",
-            site=self.site1,
-            monitoring_type=AgentMonType.SERVER,
-            plat=AgentPlat.WINDOWS,
-        )
-
-        baker.make_recipe(
-            "agents.online_agent",
-            site=self.site3,
-            monitoring_type=AgentMonType.SERVER,
-            plat=AgentPlat.LINUX,
-        )
-
-        calls = [
-            call("tacagent", "https://mesh.example.com/test", wait=False),
-            call("mesh", "", wait=False),
-        ]
-
-        call_command("bulk_restart_agents")
-
-        recover.assert_has_calls(calls)
-        mock_sleep.assert_called_with(10)
@@ -1,63 +0,0 @@
-from typing import TYPE_CHECKING
-from unittest.mock import patch
-
-from model_bakery import baker
-
-from tacticalrmm.constants import AgentMonType, AgentPlat
-from tacticalrmm.test import TacticalTestCase
-
-if TYPE_CHECKING:
-    from clients.models import Client, Site
-
-
-class TestRecovery(TacticalTestCase):
-    def setUp(self) -> None:
-        self.authenticate()
-        self.setup_coresettings()
-        self.client1: "Client" = baker.make("clients.Client")
-        self.site1: "Site" = baker.make("clients.Site", client=self.client1)
-
-    @patch("agents.models.Agent.recover")
-    @patch("agents.views.get_mesh_ws_url")
-    def test_recover(self, get_mesh_ws_url, recover) -> None:
-        get_mesh_ws_url.return_value = "https://mesh.example.com"
-
-        agent = baker.make_recipe(
-            "agents.online_agent",
-            site=self.site1,
-            monitoring_type=AgentMonType.SERVER,
-            plat=AgentPlat.WINDOWS,
-        )
-
-        url = f"/agents/{agent.agent_id}/recover/"
-
-        # test successfull tacticalagent recovery
-        data = {"mode": "tacagent"}
-        r = self.client.post(url, data, format="json")
-        self.assertEqual(r.status_code, 200)
-        recover.assert_called_with("tacagent", "https://mesh.example.com", wait=False)
-        get_mesh_ws_url.assert_called_once()
-
-        # reset mocks
-        recover.reset_mock()
-        get_mesh_ws_url.reset_mock()
-
-        # test successfull mesh agent recovery
-        data = {"mode": "mesh"}
-        recover.return_value = ("ok", False)
-        r = self.client.post(url, data, format="json")
-        self.assertEqual(r.status_code, 200)
-        get_mesh_ws_url.assert_not_called()
-        recover.assert_called_with("mesh", "")
-
-        # reset mocks
-        recover.reset_mock()
-        get_mesh_ws_url.reset_mock()
-
-        # test failed mesh agent recovery
-        data = {"mode": "mesh"}
-        recover.return_value = ("Unable to contact the agent", True)
-        r = self.client.post(url, data, format="json")
-        self.assertEqual(r.status_code, 400)
-
-        self.check_not_authenticated("post", url)
@@ -1,7 +1,6 @@
-from django.urls import path
-
 from autotasks.views import GetAddAutoTasks
 from checks.views import GetAddChecks
+from django.urls import path
 from logs.views import PendingActions

 from . import views
@@ -15,7 +14,6 @@ urlpatterns = [
     path("<agent:agent_id>/wmi/", views.WMI.as_view()),
     path("<agent:agent_id>/recover/", views.recover),
     path("<agent:agent_id>/reboot/", views.Reboot.as_view()),
-    path("<agent:agent_id>/shutdown/", views.Shutdown.as_view()),
     path("<agent:agent_id>/ping/", views.ping),
     # alias for checks get view
     path("<agent:agent_id>/checks/", GetAddChecks.as_view()),
@@ -42,7 +40,4 @@ urlpatterns = [
     path("versions/", views.get_agent_versions),
     path("update/", views.update_agents),
     path("installer/", views.install_agent),
-    path("bulkrecovery/", views.bulk_agent_recovery),
-    path("scripthistory/", views.ScriptRunHistory.as_view()),
-    path("<agent:agent_id>/wol/", views.wol),
 ]
@@ -1,28 +1,38 @@
 import asyncio
+import tempfile
 import urllib.parse
-from io import StringIO
-from pathlib import Path

+from core.models import CodeSignToken
+from core.utils import get_mesh_device_id, get_mesh_ws_url, get_core_settings
 from django.conf import settings
 from django.http import FileResponse

-from core.utils import get_core_settings, get_mesh_device_id, get_mesh_ws_url
 from tacticalrmm.constants import MeshAgentIdent


-def get_agent_url(*, goarch: str, plat: str, token: str = "") -> str:
-    ver = settings.LATEST_AGENT_VER
-    if token:
-        params = {
-            "version": ver,
-            "arch": goarch,
-            "token": token,
-            "plat": plat,
-            "api": settings.ALLOWED_HOSTS[0],
-        }
-        return settings.AGENTS_URL + urllib.parse.urlencode(params)
+def get_agent_url(arch: str, plat: str) -> str:

-    return f"https://github.com/amidaware/rmmagent/releases/download/v{ver}/tacticalagent-v{ver}-{plat}-{goarch}.exe"
+    if plat == "windows":
+        endpoint = "winagents"
+        dl_url = settings.DL_32 if arch == "32" else settings.DL_64
+    else:
+        endpoint = "linuxagents"
+        dl_url = ""
+
+    token = CodeSignToken.objects.first()
+    if not token:
+        return dl_url
+
+    if token.is_valid:
+        base_url = settings.EXE_GEN_URL + f"/api/v1/{endpoint}/?"
+        params = {
+            "version": settings.LATEST_AGENT_VER,
+            "arch": arch,
+            "token": token.token,
+        }
+        dl_url = base_url + urllib.parse.urlencode(params)
+
+    return dl_url


 def generate_linux_install(
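Aside (not part of the diff): a standalone sketch of the keyword-only `get_agent_url` shown on the minus side of the hunk above, with the Django settings swapped for placeholder constants. `AGENTS_URL`, `ALLOWED_HOSTS`, and `LATEST_AGENT_VER` below are assumptions for illustration, not values taken from this repository.

```python
# Illustrative only -- not part of the diff.
import urllib.parse

AGENTS_URL = "https://agents.example.com/api/v2/agents/?"  # assumed to end with "?"
ALLOWED_HOSTS = ["api.example.com"]
LATEST_AGENT_VER = "2.0.0"


def get_agent_url(*, goarch: str, plat: str, token: str = "") -> str:
    ver = LATEST_AGENT_VER
    if token:
        params = {
            "version": ver,
            "arch": goarch,
            "token": token,
            "plat": plat,
            "api": ALLOWED_HOSTS[0],
        }
        # urlencode() percent-encodes each value and joins the pairs with "&"
        return AGENTS_URL + urllib.parse.urlencode(params)
    # without a code-sign token, fall back to the public GitHub release asset
    return (
        "https://github.com/amidaware/rmmagent/releases/download/"
        f"v{ver}/tacticalagent-v{ver}-{plat}-{goarch}.exe"
    )


if __name__ == "__main__":
    print(get_agent_url(goarch="amd64", plat="linux", token="token123"))
```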
@@ -34,6 +44,7 @@ def generate_linux_install(
     api: str,
     download_url: str,
 ) -> FileResponse:
+
     match arch:
         case "amd64":
             arch_id = MeshAgentIdent.LINUX64
@@ -51,10 +62,12 @@ def generate_linux_install(
     uri = get_mesh_ws_url()
     mesh_id = asyncio.run(get_mesh_device_id(uri, core.mesh_device_group))
     mesh_dl = (
-        f"{core.mesh_site}/meshagents?id={mesh_id}&installflags=2&meshinstall={arch_id}"
+        f"{core.mesh_site}/meshagents?id={mesh_id}&installflags=0&meshinstall={arch_id}"
     )

-    text = Path(settings.LINUX_AGENT_SCRIPT).read_text()
+    sh = settings.LINUX_AGENT_SCRIPT
+    with open(sh, "r") as f:
+        text = f.read()

     replace = {
         "agentDLChange": download_url,
@@ -69,8 +82,11 @@ def generate_linux_install(
     for i, j in replace.items():
         text = text.replace(i, j)

-    text += "\n"
-    with StringIO(text) as fp:
+    with tempfile.NamedTemporaryFile() as fp:
+        with open(fp.name, "w") as f:
+            f.write(text)
+            f.write("\n")
+
         return FileResponse(
-            fp.read(), as_attachment=True, filename="linux_agent_install.sh"
+            open(fp.name, "rb"), as_attachment=True, filename="linux_agent_install.sh"
         )
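Aside (not part of the diff): the hunk above trades a temp file on disk for an in-memory `StringIO` when serving the rendered install script. A minimal, framework-free sketch of the underlying render-and-stream pattern, standard library only; the template text and placeholder names are illustrative.

```python
# Illustrative only -- not part of the diff.
from io import BytesIO


def render_install_script(template: str, replace: dict[str, str]) -> BytesIO:
    # substitute each placeholder, then hand back a file-like object
    text = template
    for placeholder, value in replace.items():
        text = text.replace(placeholder, value)
    text += "\n"
    return BytesIO(text.encode("utf-8"))


if __name__ == "__main__":
    tmpl = "agentDL='agentDLChange'\napiURL='apiURLChange'"
    buf = render_install_script(
        tmpl,
        {"agentDLChange": "https://example.com/agent", "apiURLChange": "api.example.com"},
    )
    print(buf.getvalue().decode("utf-8"))
```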
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
 # Generated by Django 4.0.3 on 2022-04-07 17:28

-import django.db.models.deletion
 from django.db import migrations, models
+import django.db.models.deletion


 def delete_alerts_without_agent(apps, schema):
@@ -1,23 +0,0 @@
-# Generated by Django 4.0.5 on 2022-06-29 07:57
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('alerts', '0011_alter_alert_agent'),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name='alert',
-            name='action_retcode',
-            field=models.BigIntegerField(blank=True, null=True),
-        ),
-        migrations.AlterField(
-            model_name='alert',
-            name='resolved_action_retcode',
-            field=models.BigIntegerField(blank=True, null=True),
-        ),
-    ]
@@ -1,36 +0,0 @@
-# Generated by Django 4.1.3 on 2022-11-26 20:22
-
-import django.contrib.postgres.fields
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("alerts", "0012_alter_alert_action_retcode_and_more"),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name="alerttemplate",
-            name="action_env_vars",
-            field=django.contrib.postgres.fields.ArrayField(
-                base_field=models.TextField(blank=True, null=True),
-                blank=True,
-                default=list,
-                null=True,
-                size=None,
-            ),
-        ),
-        migrations.AddField(
-            model_name="alerttemplate",
-            name="resolved_action_env_vars",
-            field=django.contrib.postgres.fields.ArrayField(
-                base_field=models.TextField(blank=True, null=True),
-                blank=True,
-                default=list,
-                null=True,
-                size=None,
-            ),
-        ),
-    ]
@@ -1,55 +0,0 @@
-# Generated by Django 4.2.13 on 2024-06-28 20:21
-
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("core", "0045_coresettings_enable_server_scripts_and_more"),
-        ("alerts", "0013_alerttemplate_action_env_vars_and_more"),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name="alerttemplate",
-            name="action_rest",
-            field=models.ForeignKey(
-                blank=True,
-                null=True,
-                on_delete=django.db.models.deletion.SET_NULL,
-                related_name="url_action_alert_template",
-                to="core.urlaction",
-            ),
-        ),
-        migrations.AddField(
-            model_name="alerttemplate",
-            name="action_type",
-            field=models.CharField(
-                choices=[("script", "Script"), ("server", "Server"), ("rest", "Rest")],
-                default="script",
-                max_length=10,
-            ),
-        ),
-        migrations.AddField(
-            model_name="alerttemplate",
-            name="resolved_action_rest",
-            field=models.ForeignKey(
-                blank=True,
-                null=True,
-                on_delete=django.db.models.deletion.SET_NULL,
-                related_name="resolved_url_action_alert_template",
-                to="core.urlaction",
-            ),
-        ),
-        migrations.AddField(
-            model_name="alerttemplate",
-            name="resolved_action_type",
-            field=models.CharField(
-                choices=[("script", "Script"), ("server", "Server"), ("rest", "Rest")],
-                default="script",
-                max_length=10,
-            ),
-        ),
-    ]
@@ -1,26 +1,15 @@
 from __future__ import annotations

-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast
+import re
+from typing import TYPE_CHECKING, Union, Optional, Dict, Any, List, cast

 from django.contrib.postgres.fields import ArrayField
 from django.db import models
 from django.db.models.fields import BooleanField, PositiveIntegerField
 from django.utils import timezone as djangotime

-from core.utils import run_server_script, run_url_rest_action
 from logs.models import BaseAuditModel, DebugLog
-from tacticalrmm.constants import (
-    AgentHistoryType,
-    AgentMonType,
-    AlertSeverity,
-    AlertTemplateActionType,
-    AlertType,
-    CheckType,
-    DebugLogType,
-)
-from tacticalrmm.logger import logger
 from tacticalrmm.models import PermissionQuerySet
-from tacticalrmm.utils import RE_DB_VALUE, get_db_value

 if TYPE_CHECKING:
     from agents.models import Agent
@@ -29,6 +18,20 @@ if TYPE_CHECKING:
     from clients.models import Client, Site


+SEVERITY_CHOICES = [
+    ("info", "Informational"),
+    ("warning", "Warning"),
+    ("error", "Error"),
+]
+
+ALERT_TYPE_CHOICES = [
+    ("availability", "Availability"),
+    ("check", "Check"),
+    ("task", "Task"),
+    ("custom", "Custom"),
+]
+
+
 class Alert(models.Model):
     objects = PermissionQuerySet.as_manager()

@@ -54,7 +57,7 @@ class Alert(models.Model):
         blank=True,
     )
     alert_type = models.CharField(
-        max_length=20, choices=AlertType.choices, default=AlertType.AVAILABILITY
+        max_length=20, choices=ALERT_TYPE_CHOICES, default="availability"
     )
     message = models.TextField(null=True, blank=True)
     alert_time = models.DateTimeField(auto_now_add=True, null=True, blank=True)
@@ -62,9 +65,7 @@ class Alert(models.Model):
     snooze_until = models.DateTimeField(null=True, blank=True)
     resolved = models.BooleanField(default=False)
     resolved_on = models.DateTimeField(null=True, blank=True)
-    severity = models.CharField(
-        max_length=30, choices=AlertSeverity.choices, default=AlertSeverity.INFO
-    )
+    severity = models.CharField(max_length=30, choices=SEVERITY_CHOICES, default="info")
     email_sent = models.DateTimeField(null=True, blank=True)
     resolved_email_sent = models.DateTimeField(null=True, blank=True)
     sms_sent = models.DateTimeField(null=True, blank=True)
@@ -73,21 +74,21 @@ class Alert(models.Model):
     action_run = models.DateTimeField(null=True, blank=True)
     action_stdout = models.TextField(null=True, blank=True)
     action_stderr = models.TextField(null=True, blank=True)
-    action_retcode = models.BigIntegerField(null=True, blank=True)
+    action_retcode = models.IntegerField(null=True, blank=True)
     action_execution_time = models.CharField(max_length=100, null=True, blank=True)
     resolved_action_run = models.DateTimeField(null=True, blank=True)
     resolved_action_stdout = models.TextField(null=True, blank=True)
     resolved_action_stderr = models.TextField(null=True, blank=True)
-    resolved_action_retcode = models.BigIntegerField(null=True, blank=True)
+    resolved_action_retcode = models.IntegerField(null=True, blank=True)
     resolved_action_execution_time = models.CharField(
         max_length=100, null=True, blank=True
     )

     def __str__(self) -> str:
-        return f"{self.alert_type} - {self.message}"
+        return self.message

     @property
-    def assigned_agent(self) -> "Optional[Agent]":
+    def assigned_agent(self) -> "Agent":
         return self.agent

     @property
@@ -98,15 +99,6 @@ class Alert(models.Model):
     def client(self) -> "Client":
         return self.agent.client

-    @property
-    def get_result(self):
-        if self.alert_type == AlertType.CHECK:
-            return self.assigned_check.checkresults.get(agent=self.agent)
-        elif self.alert_type == AlertType.TASK:
-            return self.assigned_task.taskresults.get(agent=self.agent)
-
-        return None
-
     def resolve(self) -> None:
         self.resolved = True
         self.resolved_on = djangotime.now()
@@ -118,11 +110,8 @@ class Alert(models.Model):
|
|||||||
def create_or_return_availability_alert(
|
def create_or_return_availability_alert(
|
||||||
cls, agent: Agent, skip_create: bool = False
|
cls, agent: Agent, skip_create: bool = False
|
||||||
) -> Optional[Alert]:
|
) -> Optional[Alert]:
|
||||||
if agent.maintenance_mode:
|
|
||||||
return None
|
|
||||||
|
|
||||||
if not cls.objects.filter(
|
if not cls.objects.filter(
|
||||||
agent=agent, alert_type=AlertType.AVAILABILITY, resolved=False
|
agent=agent, alert_type="availability", resolved=False
|
||||||
).exists():
|
).exists():
|
||||||
if skip_create:
|
if skip_create:
|
||||||
return None
|
return None
|
||||||
@@ -131,9 +120,9 @@ class Alert(models.Model):
|
|||||||
Alert,
|
Alert,
|
||||||
cls.objects.create(
|
cls.objects.create(
|
||||||
agent=agent,
|
agent=agent,
|
||||||
alert_type=AlertType.AVAILABILITY,
|
alert_type="availability",
|
||||||
severity=AlertSeverity.ERROR,
|
severity="error",
|
||||||
message=f"{agent.hostname} in {agent.client.name}, {agent.site.name} is overdue.",
|
message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.",
|
||||||
hidden=True,
|
hidden=True,
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
@@ -142,12 +131,12 @@ class Alert(models.Model):
|
|||||||
return cast(
|
return cast(
|
||||||
Alert,
|
Alert,
|
||||||
cls.objects.get(
|
cls.objects.get(
|
||||||
agent=agent, alert_type=AlertType.AVAILABILITY, resolved=False
|
agent=agent, alert_type="availability", resolved=False
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
except cls.MultipleObjectsReturned:
|
except cls.MultipleObjectsReturned:
|
||||||
alerts = cls.objects.filter(
|
alerts = cls.objects.filter(
|
||||||
agent=agent, alert_type=AlertType.AVAILABILITY, resolved=False
|
agent=agent, alert_type="availability", resolved=False
|
||||||
)
|
)
|
||||||
|
|
||||||
last_alert = cast(Alert, alerts.last())
|
last_alert = cast(Alert, alerts.last())
|
||||||
@@ -169,8 +158,6 @@ class Alert(models.Model):
|
|||||||
alert_severity: Optional[str] = None,
|
alert_severity: Optional[str] = None,
|
||||||
skip_create: bool = False,
|
skip_create: bool = False,
|
||||||
) -> "Optional[Alert]":
|
) -> "Optional[Alert]":
|
||||||
if agent.maintenance_mode:
|
|
||||||
return None
|
|
||||||
|
|
||||||
# need to pass agent if the check is a policy
|
# need to pass agent if the check is a policy
|
||||||
if not cls.objects.filter(
|
if not cls.objects.filter(
|
||||||
@@ -186,18 +173,11 @@ class Alert(models.Model):
|
|||||||
cls.objects.create(
|
cls.objects.create(
|
||||||
assigned_check=check,
|
assigned_check=check,
|
||||||
agent=agent,
|
agent=agent,
|
||||||
alert_type=AlertType.CHECK,
|
alert_type="check",
|
||||||
severity=(
|
severity=check.alert_severity
|
||||||
check.alert_severity
|
|
||||||
if check.check_type
|
if check.check_type
|
||||||
not in {
|
not in ["memory", "cpuload", "diskspace", "script"]
|
||||||
CheckType.MEMORY,
|
else alert_severity,
|
||||||
CheckType.CPU_LOAD,
|
|
||||||
CheckType.DISK_SPACE,
|
|
||||||
CheckType.SCRIPT,
|
|
||||||
}
|
|
||||||
else alert_severity
|
|
||||||
),
|
|
||||||
message=f"{agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
|
message=f"{agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
|
||||||
hidden=True,
|
hidden=True,
|
||||||
),
|
),
|
||||||
@@ -236,8 +216,6 @@ class Alert(models.Model):
|
|||||||
agent: "Agent",
|
agent: "Agent",
|
||||||
skip_create: bool = False,
|
skip_create: bool = False,
|
||||||
) -> "Optional[Alert]":
|
) -> "Optional[Alert]":
|
||||||
if agent.maintenance_mode:
|
|
||||||
return None
|
|
||||||
|
|
||||||
if not cls.objects.filter(
|
if not cls.objects.filter(
|
||||||
assigned_task=task,
|
assigned_task=task,
|
||||||
@@ -252,7 +230,7 @@ class Alert(models.Model):
|
|||||||
cls.objects.create(
|
cls.objects.create(
|
||||||
assigned_task=task,
|
assigned_task=task,
|
||||||
agent=agent,
|
agent=agent,
|
||||||
alert_type=AlertType.TASK,
|
alert_type="task",
|
||||||
severity=task.alert_severity,
|
severity=task.alert_severity,
|
||||||
message=f"{agent.hostname} has task: {task.name} that failed.",
|
message=f"{agent.hostname} has task: {task.name} that failed.",
|
||||||
hidden=True,
|
hidden=True,
|
||||||
@@ -290,12 +268,10 @@ class Alert(models.Model):
|
|||||||
def handle_alert_failure(
|
def handle_alert_failure(
|
||||||
cls, instance: Union[Agent, TaskResult, CheckResult]
|
cls, instance: Union[Agent, TaskResult, CheckResult]
|
||||||
) -> None:
|
) -> None:
|
||||||
from agents.models import Agent, AgentHistory
|
from agents.models import Agent
|
||||||
from autotasks.models import TaskResult
|
from autotasks.models import TaskResult
|
||||||
from checks.models import CheckResult
|
from checks.models import CheckResult
|
||||||
from core.models import CoreSettings
|
|
||||||
|
|
||||||
core = CoreSettings.objects.first()
|
|
||||||
# set variables
|
# set variables
|
||||||
dashboard_severities = None
|
dashboard_severities = None
|
||||||
email_severities = None
|
email_severities = None
|
||||||
@@ -306,7 +282,7 @@ class Alert(models.Model):
|
|||||||
alert_interval = None
|
alert_interval = None
|
||||||
email_task = None
|
email_task = None
|
||||||
text_task = None
|
text_task = None
|
||||||
should_run_script_or_webhook = False
|
run_script_action = None
|
||||||
|
|
||||||
# check what the instance passed is
|
# check what the instance passed is
|
||||||
if isinstance(instance, Agent):
|
if isinstance(instance, Agent):
|
||||||
@@ -320,19 +296,19 @@ class Alert(models.Model):
|
|||||||
dashboard_alert = instance.overdue_dashboard_alert
|
dashboard_alert = instance.overdue_dashboard_alert
|
||||||
alert_template = instance.alert_template
|
alert_template = instance.alert_template
|
||||||
maintenance_mode = instance.maintenance_mode
|
maintenance_mode = instance.maintenance_mode
|
||||||
alert_severity = AlertSeverity.ERROR
|
alert_severity = "error"
|
||||||
agent = instance
|
agent = instance
|
||||||
dashboard_severities = [AlertSeverity.ERROR]
|
|
||||||
email_severities = [AlertSeverity.ERROR]
|
|
||||||
text_severities = [AlertSeverity.ERROR]
|
|
||||||
|
|
||||||
# set alert_template settings
|
# set alert_template settings
|
||||||
if alert_template:
|
if alert_template:
|
||||||
|
dashboard_severities = ["error"]
|
||||||
|
email_severities = ["error"]
|
||||||
|
text_severities = ["error"]
|
||||||
always_dashboard = alert_template.agent_always_alert
|
always_dashboard = alert_template.agent_always_alert
|
||||||
always_email = alert_template.agent_always_email
|
always_email = alert_template.agent_always_email
|
||||||
always_text = alert_template.agent_always_text
|
always_text = alert_template.agent_always_text
|
||||||
alert_interval = alert_template.agent_periodic_alert_days
|
alert_interval = alert_template.agent_periodic_alert_days
|
||||||
should_run_script_or_webhook = alert_template.agent_script_actions
|
run_script_action = alert_template.agent_script_actions
|
||||||
|
|
||||||
elif isinstance(instance, CheckResult):
|
elif isinstance(instance, CheckResult):
|
||||||
from checks.tasks import (
|
from checks.tasks import (
|
||||||
@@ -351,39 +327,21 @@ class Alert(models.Model):
|
|||||||
alert_severity = (
|
alert_severity = (
|
||||||
instance.assigned_check.alert_severity
|
instance.assigned_check.alert_severity
|
||||||
if instance.assigned_check.check_type
|
if instance.assigned_check.check_type
|
||||||
not in {
|
not in ["memory", "cpuload", "diskspace", "script"]
|
||||||
CheckType.MEMORY,
|
|
||||||
CheckType.CPU_LOAD,
|
|
||||||
CheckType.DISK_SPACE,
|
|
||||||
CheckType.SCRIPT,
|
|
||||||
}
|
|
||||||
else instance.alert_severity
|
else instance.alert_severity
|
||||||
)
|
)
|
||||||
agent = instance.agent
|
agent = instance.agent
|
||||||
|
|
||||||
# set alert_template settings
|
# set alert_template settings
|
||||||
if alert_template:
|
if alert_template:
|
||||||
dashboard_severities = (
|
dashboard_severities = alert_template.check_dashboard_alert_severity
|
||||||
alert_template.check_dashboard_alert_severity
|
email_severities = alert_template.check_email_alert_severity
|
||||||
or [
|
text_severities = alert_template.check_text_alert_severity
|
||||||
AlertSeverity.ERROR,
|
|
||||||
AlertSeverity.WARNING,
|
|
||||||
AlertSeverity.INFO,
|
|
||||||
]
|
|
||||||
)
|
|
||||||
email_severities = alert_template.check_email_alert_severity or [
|
|
||||||
AlertSeverity.ERROR,
|
|
||||||
AlertSeverity.WARNING,
|
|
||||||
]
|
|
||||||
text_severities = alert_template.check_text_alert_severity or [
|
|
||||||
AlertSeverity.ERROR,
|
|
||||||
AlertSeverity.WARNING,
|
|
||||||
]
|
|
||||||
always_dashboard = alert_template.check_always_alert
|
always_dashboard = alert_template.check_always_alert
|
||||||
always_email = alert_template.check_always_email
|
always_email = alert_template.check_always_email
|
||||||
always_text = alert_template.check_always_text
|
always_text = alert_template.check_always_text
|
||||||
alert_interval = alert_template.check_periodic_alert_days
|
alert_interval = alert_template.check_periodic_alert_days
|
||||||
should_run_script_or_webhook = alert_template.check_script_actions
|
run_script_action = alert_template.check_script_actions
|
||||||
|
|
||||||
elif isinstance(instance, TaskResult):
|
elif isinstance(instance, TaskResult):
|
||||||
from autotasks.tasks import handle_task_email_alert, handle_task_sms_alert
|
from autotasks.tasks import handle_task_email_alert, handle_task_sms_alert
|
||||||
@@ -401,23 +359,14 @@ class Alert(models.Model):
|
|||||||
|
|
||||||
# set alert_template settings
|
# set alert_template settings
|
||||||
if alert_template:
|
if alert_template:
|
||||||
dashboard_severities = alert_template.task_dashboard_alert_severity or [
|
dashboard_severities = alert_template.task_dashboard_alert_severity
|
||||||
AlertSeverity.ERROR,
|
email_severities = alert_template.task_email_alert_severity
|
||||||
AlertSeverity.WARNING,
|
text_severities = alert_template.task_text_alert_severity
|
||||||
]
|
|
||||||
email_severities = alert_template.task_email_alert_severity or [
|
|
||||||
AlertSeverity.ERROR,
|
|
||||||
AlertSeverity.WARNING,
|
|
||||||
]
|
|
||||||
text_severities = alert_template.task_text_alert_severity or [
|
|
||||||
AlertSeverity.ERROR,
|
|
||||||
AlertSeverity.WARNING,
|
|
||||||
]
|
|
||||||
always_dashboard = alert_template.task_always_alert
|
always_dashboard = alert_template.task_always_alert
|
||||||
always_email = alert_template.task_always_email
|
always_email = alert_template.task_always_email
|
||||||
always_text = alert_template.task_always_text
|
always_text = alert_template.task_always_text
|
||||||
alert_interval = alert_template.task_periodic_alert_days
|
alert_interval = alert_template.task_periodic_alert_days
|
||||||
should_run_script_or_webhook = alert_template.task_script_actions
|
run_script_action = alert_template.task_script_actions
|
||||||
|
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
@@ -435,6 +384,7 @@ class Alert(models.Model):
|
|||||||
|
|
||||||
# create alert in dashboard if enabled
|
# create alert in dashboard if enabled
|
||||||
if dashboard_alert or always_dashboard:
|
if dashboard_alert or always_dashboard:
|
||||||
|
|
||||||
# check if alert template is set and specific severities are configured
|
# check if alert template is set and specific severities are configured
|
||||||
if (
|
if (
|
||||||
not alert_template
|
not alert_template
|
||||||
@@ -445,23 +395,13 @@ class Alert(models.Model):
|
|||||||
alert.hidden = False
|
alert.hidden = False
|
||||||
alert.save(update_fields=["hidden"])
|
alert.save(update_fields=["hidden"])
|
||||||
|
|
||||||
# TODO rework this
|
|
||||||
if alert.severity == AlertSeverity.INFO and not core.notify_on_info_alerts:
|
|
||||||
email_alert = False
|
|
||||||
always_email = False
|
|
||||||
|
|
||||||
elif (
|
|
||||||
alert.severity == AlertSeverity.WARNING
|
|
||||||
and not core.notify_on_warning_alerts
|
|
||||||
):
|
|
||||||
email_alert = False
|
|
||||||
always_email = False
|
|
||||||
|
|
||||||
# send email if enabled
|
# send email if enabled
|
||||||
if email_alert or always_email:
|
if email_alert or always_email:
|
||||||
|
|
||||||
# check if alert template is set and specific severities are configured
|
# check if alert template is set and specific severities are configured
|
||||||
if not alert_template or (
|
if (
|
||||||
alert_template
|
not alert_template
|
||||||
|
or alert_template
|
||||||
and email_severities
|
and email_severities
|
||||||
and alert.severity in email_severities
|
and alert.severity in email_severities
|
||||||
):
|
):
|
||||||
@@ -470,89 +410,33 @@ class Alert(models.Model):
|
|||||||
alert_interval=alert_interval,
|
alert_interval=alert_interval,
|
||||||
)
|
)
|
||||||
|
|
||||||
# TODO rework this
|
|
||||||
if alert.severity == AlertSeverity.INFO and not core.notify_on_info_alerts:
|
|
||||||
text_alert = False
|
|
||||||
always_text = False
|
|
||||||
elif (
|
|
||||||
alert.severity == AlertSeverity.WARNING
|
|
||||||
and not core.notify_on_warning_alerts
|
|
||||||
):
|
|
||||||
text_alert = False
|
|
||||||
always_text = False
|
|
||||||
|
|
||||||
# send text if enabled
|
# send text if enabled
|
||||||
if text_alert or always_text:
|
if text_alert or always_text:
|
||||||
|
|
||||||
# check if alert template is set and specific severities are configured
|
# check if alert template is set and specific severities are configured
|
||||||
if not alert_template or (
|
if (
|
||||||
alert_template and text_severities and alert.severity in text_severities
|
not alert_template
|
||||||
|
or alert_template
|
||||||
|
and text_severities
|
||||||
|
and alert.severity in text_severities
|
||||||
):
|
):
|
||||||
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
|
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
|
||||||
|
|
||||||
# check if any scripts/webhooks should be run
|
# check if any scripts should be run
|
||||||
if alert_template and not alert.action_run and should_run_script_or_webhook:
|
|
||||||
if (
|
if (
|
||||||
alert_template.action_type == AlertTemplateActionType.SCRIPT
|
alert_template
|
||||||
and alert_template.action
|
and alert_template.action
|
||||||
|
and run_script_action
|
||||||
|
and not alert.action_run
|
||||||
):
|
):
|
||||||
hist = AgentHistory.objects.create(
|
|
||||||
agent=agent,
|
|
||||||
type=AgentHistoryType.SCRIPT_RUN,
|
|
||||||
script=alert_template.action,
|
|
||||||
username="alert-action-failure",
|
|
||||||
)
|
|
||||||
r = agent.run_script(
|
r = agent.run_script(
|
||||||
scriptpk=alert_template.action.pk,
|
scriptpk=alert_template.action.pk,
|
||||||
args=alert.parse_script_args(alert_template.action_args),
|
args=alert.parse_script_args(alert_template.action_args),
|
||||||
timeout=alert_template.action_timeout,
|
timeout=alert_template.action_timeout,
|
||||||
wait=True,
|
wait=True,
|
||||||
history_pk=hist.pk,
|
|
||||||
full=True,
|
full=True,
|
||||||
run_on_any=True,
|
run_on_any=True,
|
||||||
run_as_user=False,
|
|
||||||
env_vars=alert.parse_script_args(alert_template.action_env_vars),
|
|
||||||
)
|
)
|
||||||
elif (
|
|
||||||
alert_template.action_type == AlertTemplateActionType.SERVER
|
|
||||||
and alert_template.action
|
|
||||||
):
|
|
||||||
stdout, stderr, execution_time, retcode = run_server_script(
|
|
||||||
body=alert_template.action.script_body,
|
|
||||||
args=alert.parse_script_args(alert_template.action_args),
|
|
||||||
timeout=alert_template.action_timeout,
|
|
||||||
env_vars=alert.parse_script_args(alert_template.action_env_vars),
|
|
||||||
shell=alert_template.action.shell,
|
|
||||||
)
|
|
||||||
|
|
||||||
r = {
|
|
||||||
"retcode": retcode,
|
|
||||||
"stdout": stdout,
|
|
||||||
"stderr": stderr,
|
|
||||||
"execution_time": execution_time,
|
|
||||||
}
|
|
||||||
|
|
||||||
elif alert_template.action_type == AlertTemplateActionType.REST:
|
|
||||||
if (
|
|
||||||
alert.severity == AlertSeverity.INFO
|
|
||||||
and not core.notify_on_info_alerts
|
|
||||||
or alert.severity == AlertSeverity.WARNING
|
|
||||||
and not core.notify_on_warning_alerts
|
|
||||||
):
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
output, status = run_url_rest_action(
|
|
||||||
action_id=alert_template.action_rest.id, instance=alert
|
|
||||||
)
|
|
||||||
logger.debug(f"{output=} {status=}")
|
|
||||||
|
|
||||||
r = {
|
|
||||||
"stdout": output,
|
|
||||||
"stderr": "",
|
|
||||||
"execution_time": 0,
|
|
||||||
"retcode": status,
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
return
|
|
||||||
|
|
||||||
# command was successful
|
# command was successful
|
||||||
if isinstance(r, dict):
|
if isinstance(r, dict):
|
||||||
@@ -563,37 +447,26 @@ class Alert(models.Model):
|
|||||||
alert.action_run = djangotime.now()
|
alert.action_run = djangotime.now()
|
||||||
alert.save()
|
alert.save()
|
||||||
else:
|
else:
|
||||||
if alert_template.action_type == AlertTemplateActionType.SCRIPT:
|
|
||||||
DebugLog.error(
|
DebugLog.error(
|
||||||
agent=agent,
|
agent=agent,
|
||||||
log_type=DebugLogType.SCRIPTING,
|
log_type="scripting",
|
||||||
message=f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) failure alert",
|
message=f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) failure alert",
|
||||||
)
|
)
|
||||||
else:
|
|
||||||
DebugLog.error(
|
|
||||||
log_type=DebugLogType.SCRIPTING,
|
|
||||||
message=f"Failure action: {alert_template.action.name} failed to run on server for failure alert",
|
|
||||||
)
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def handle_alert_resolve(
|
def handle_alert_resolve(
|
||||||
cls, instance: Union[Agent, TaskResult, CheckResult]
|
cls, instance: Union[Agent, TaskResult, CheckResult]
|
||||||
) -> None:
|
) -> None:
|
||||||
from agents.models import Agent, AgentHistory
|
from agents.models import Agent
|
||||||
from autotasks.models import TaskResult
|
from autotasks.models import TaskResult
|
||||||
from checks.models import CheckResult
|
from checks.models import CheckResult
|
||||||
from core.models import CoreSettings
|
|
||||||
|
|
||||||
core = CoreSettings.objects.first()
|
|
||||||
|
|
||||||
# set variables
|
# set variables
|
||||||
email_severities = None
|
|
||||||
text_severities = None
|
|
||||||
email_on_resolved = False
|
email_on_resolved = False
|
||||||
text_on_resolved = False
|
text_on_resolved = False
|
||||||
resolved_email_task = None
|
resolved_email_task = None
|
||||||
resolved_text_task = None
|
resolved_text_task = None
|
||||||
should_run_script_or_webhook = False
|
run_script_action = None
|
||||||
|
|
||||||
# check what the instance passed is
|
# check what the instance passed is
|
||||||
if isinstance(instance, Agent):
|
if isinstance(instance, Agent):
|
||||||
@@ -609,9 +482,7 @@ class Alert(models.Model):
|
|||||||
if alert_template:
|
if alert_template:
|
||||||
email_on_resolved = alert_template.agent_email_on_resolved
|
email_on_resolved = alert_template.agent_email_on_resolved
|
||||||
text_on_resolved = alert_template.agent_text_on_resolved
|
text_on_resolved = alert_template.agent_text_on_resolved
|
||||||
should_run_script_or_webhook = alert_template.agent_script_actions
|
run_script_action = alert_template.agent_script_actions
|
||||||
email_severities = [AlertSeverity.ERROR]
|
|
||||||
text_severities = [AlertSeverity.ERROR]
|
|
||||||
|
|
||||||
if agent.overdue_email_alert:
|
if agent.overdue_email_alert:
|
||||||
email_on_resolved = True
|
email_on_resolved = True
|
||||||
@@ -634,15 +505,7 @@ class Alert(models.Model):
|
|||||||
if alert_template:
|
if alert_template:
|
||||||
email_on_resolved = alert_template.check_email_on_resolved
|
email_on_resolved = alert_template.check_email_on_resolved
|
||||||
text_on_resolved = alert_template.check_text_on_resolved
|
text_on_resolved = alert_template.check_text_on_resolved
|
||||||
should_run_script_or_webhook = alert_template.check_script_actions
|
run_script_action = alert_template.check_script_actions
|
||||||
email_severities = alert_template.check_email_alert_severity or [
|
|
||||||
AlertSeverity.ERROR,
|
|
||||||
AlertSeverity.WARNING,
|
|
||||||
]
|
|
||||||
text_severities = alert_template.check_text_alert_severity or [
|
|
||||||
AlertSeverity.ERROR,
|
|
||||||
AlertSeverity.WARNING,
|
|
||||||
]
|
|
||||||
|
|
||||||
elif isinstance(instance, TaskResult):
|
elif isinstance(instance, TaskResult):
|
||||||
from autotasks.tasks import (
|
from autotasks.tasks import (
|
||||||
@@ -660,15 +523,7 @@ class Alert(models.Model):
|
|||||||
if alert_template:
|
if alert_template:
|
||||||
email_on_resolved = alert_template.task_email_on_resolved
|
email_on_resolved = alert_template.task_email_on_resolved
|
||||||
text_on_resolved = alert_template.task_text_on_resolved
|
text_on_resolved = alert_template.task_text_on_resolved
|
||||||
should_run_script_or_webhook = alert_template.task_script_actions
|
run_script_action = alert_template.task_script_actions
|
||||||
email_severities = alert_template.task_email_alert_severity or [
|
|
||||||
AlertSeverity.ERROR,
|
|
||||||
AlertSeverity.WARNING,
|
|
||||||
]
|
|
||||||
text_severities = alert_template.task_text_alert_severity or [
|
|
||||||
AlertSeverity.ERROR,
|
|
||||||
AlertSeverity.WARNING,
|
|
||||||
]
|
|
||||||
|
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
@@ -683,103 +538,27 @@ class Alert(models.Model):
|
|||||||
|
|
||||||
# check if a resolved email notification should be send
|
# check if a resolved email notification should be send
|
||||||
if email_on_resolved and not alert.resolved_email_sent:
|
if email_on_resolved and not alert.resolved_email_sent:
|
||||||
if alert.severity == AlertSeverity.INFO and not core.notify_on_info_alerts:
|
|
||||||
pass
|
|
||||||
|
|
||||||
elif (
|
|
||||||
alert.severity == AlertSeverity.WARNING
|
|
||||||
and not core.notify_on_warning_alerts
|
|
||||||
):
|
|
||||||
pass
|
|
||||||
elif email_severities and alert.severity not in email_severities:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
resolved_email_task.delay(pk=alert.pk)
|
resolved_email_task.delay(pk=alert.pk)
|
||||||
|
|
||||||
# check if resolved text should be sent
|
# check if resolved text should be sent
|
||||||
if text_on_resolved and not alert.resolved_sms_sent:
|
if text_on_resolved and not alert.resolved_sms_sent:
|
||||||
if alert.severity == AlertSeverity.INFO and not core.notify_on_info_alerts:
|
|
||||||
pass
|
|
||||||
|
|
||||||
elif (
|
|
||||||
alert.severity == AlertSeverity.WARNING
|
|
||||||
and not core.notify_on_warning_alerts
|
|
||||||
):
|
|
||||||
pass
|
|
||||||
elif text_severities and alert.severity not in text_severities:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
resolved_text_task.delay(pk=alert.pk)
|
resolved_text_task.delay(pk=alert.pk)
|
||||||
|
|
||||||
# check if resolved script/webhook should be run
|
# check if resolved script should be run
|
||||||
if (
|
if (
|
||||||
alert_template
|
alert_template
|
||||||
and not alert.resolved_action_run
|
|
||||||
and should_run_script_or_webhook
|
|
||||||
):
|
|
||||||
if (
|
|
||||||
alert_template.resolved_action_type == AlertTemplateActionType.SCRIPT
|
|
||||||
and alert_template.resolved_action
|
and alert_template.resolved_action
|
||||||
|
and run_script_action
|
||||||
|
and not alert.resolved_action_run
|
||||||
):
|
):
|
||||||
hist = AgentHistory.objects.create(
|
|
||||||
agent=agent,
|
|
||||||
type=AgentHistoryType.SCRIPT_RUN,
|
|
||||||
script=alert_template.resolved_action,
|
|
||||||
username="alert-action-resolved",
|
|
||||||
)
|
|
||||||
r = agent.run_script(
|
r = agent.run_script(
|
||||||
scriptpk=alert_template.resolved_action.pk,
|
scriptpk=alert_template.resolved_action.pk,
|
||||||
args=alert.parse_script_args(alert_template.resolved_action_args),
|
args=alert.parse_script_args(alert_template.resolved_action_args),
|
||||||
timeout=alert_template.resolved_action_timeout,
|
timeout=alert_template.resolved_action_timeout,
|
||||||
wait=True,
|
wait=True,
|
||||||
history_pk=hist.pk,
|
|
||||||
full=True,
|
full=True,
|
||||||
run_on_any=True,
|
run_on_any=True,
|
||||||
run_as_user=False,
|
|
||||||
env_vars=alert_template.resolved_action_env_vars,
|
|
||||||
)
|
)
|
||||||
elif (
|
|
||||||
alert_template.resolved_action_type == AlertTemplateActionType.SERVER
|
|
||||||
and alert_template.resolved_action
|
|
||||||
):
|
|
||||||
stdout, stderr, execution_time, retcode = run_server_script(
|
|
||||||
body=alert_template.resolved_action.script_body,
|
|
||||||
args=alert.parse_script_args(alert_template.resolved_action_args),
|
|
||||||
timeout=alert_template.resolved_action_timeout,
|
|
||||||
env_vars=alert.parse_script_args(
|
|
||||||
alert_template.resolved_action_env_vars
|
|
||||||
),
|
|
||||||
shell=alert_template.resolved_action.shell,
|
|
||||||
)
|
|
||||||
r = {
|
|
||||||
"stdout": stdout,
|
|
||||||
"stderr": stderr,
|
|
||||||
"execution_time": execution_time,
|
|
||||||
"retcode": retcode,
|
|
||||||
}
|
|
||||||
|
|
||||||
elif alert_template.action_type == AlertTemplateActionType.REST:
|
|
||||||
if (
|
|
||||||
alert.severity == AlertSeverity.INFO
|
|
||||||
and not core.notify_on_info_alerts
|
|
||||||
or alert.severity == AlertSeverity.WARNING
|
|
||||||
and not core.notify_on_warning_alerts
|
|
||||||
):
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
output, status = run_url_rest_action(
|
|
||||||
action_id=alert_template.resolved_action_rest.id, instance=alert
|
|
||||||
)
|
|
||||||
logger.debug(f"{output=} {status=}")
|
|
||||||
|
|
||||||
r = {
|
|
||||||
"stdout": output,
|
|
||||||
"stderr": "",
|
|
||||||
"execution_time": 0,
|
|
||||||
"retcode": status,
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
return
|
|
||||||
|
|
||||||
# command was successful
|
# command was successful
|
||||||
if isinstance(r, dict):
|
if isinstance(r, dict):
|
||||||
@@ -792,36 +571,40 @@ class Alert(models.Model):
                 alert.resolved_action_run = djangotime.now()
                 alert.save()
             else:
-                if (
-                    alert_template.resolved_action_type
-                    == AlertTemplateActionType.SCRIPT
-                ):
-                    DebugLog.error(
-                        agent=agent,
-                        log_type=DebugLogType.SCRIPTING,
-                        message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
-                    )
-                else:
-                    DebugLog.error(
-                        log_type=DebugLogType.SCRIPTING,
-                        message=f"Resolved action: {alert_template.action.name} failed to run on server for resolved alert",
-                    )
+                DebugLog.error(
+                    agent=agent,
+                    log_type="scripting",
+                    message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
+                )

     def parse_script_args(self, args: List[str]) -> List[str]:

         if not args:
             return []

-        temp_args = []
+        temp_args = list()
+        # pattern to match for injection
+        pattern = re.compile(".*\\{\\{alert\\.(.*)\\}\\}.*")

         for arg in args:
-            temp_arg = arg
-            for string, model, prop in RE_DB_VALUE.findall(arg):
-                value = get_db_value(string=f"{model}.{prop}", instance=self)
+            match = pattern.match(arg)
+            if match:
+                name = match.group(1)

-                if value is not None:
-                    temp_arg = temp_arg.replace(string, f"'{str(value)}'")
+                # check if attr exists and isn't a function
+                if hasattr(self, name) and not callable(getattr(self, name)):
+                    value = f"'{getattr(self, name)}'"
+                else:
+                    continue

-            temp_args.append(temp_arg)
+                try:
+                    temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg))
+                except Exception as e:
+                    DebugLog.error(log_type="scripting", message=str(e))
+                    continue
+
+            else:
+                temp_args.append(arg)

         return temp_args
+
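Aside (not part of the diff): the plus side of the hunk above substitutes `{{alert.<attr>}}` placeholders in script arguments with a regex lookup against the alert instance. A simplified, standalone version of that idea; the `FakeAlert` stand-in and the omission of the `callable()` guard are deliberate simplifications, not the project's actual model.

```python
# Illustrative only -- not part of the diff.
import re

PATTERN = re.compile(r".*\{\{alert\.(.*)\}\}.*")


class FakeAlert:
    severity = "error"
    message = "CPU load check failed"


def parse_script_args(alert, args):
    out = []
    for arg in args:
        match = PATTERN.match(arg)
        if match and hasattr(alert, match.group(1)):
            # wrap the looked-up value in single quotes, like the model code does
            value = f"'{getattr(alert, match.group(1))}'"
            out.append(re.sub(r"\{\{.*\}\}", value, arg))
        else:
            out.append(arg)
    return out


if __name__ == "__main__":
    print(
        parse_script_args(
            FakeAlert(), ["-severity", "{{alert.severity}}", "-msg", "{{alert.message}}"]
        )
    )
```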
@@ -830,11 +613,6 @@ class AlertTemplate(BaseAuditModel):
|
|||||||
name = models.CharField(max_length=100)
|
name = models.CharField(max_length=100)
|
||||||
is_active = models.BooleanField(default=True)
|
is_active = models.BooleanField(default=True)
|
||||||
|
|
||||||
action_type = models.CharField(
|
|
||||||
max_length=10,
|
|
||||||
choices=AlertTemplateActionType.choices,
|
|
||||||
default=AlertTemplateActionType.SCRIPT,
|
|
||||||
)
|
|
||||||
action = models.ForeignKey(
|
action = models.ForeignKey(
|
||||||
"scripts.Script",
|
"scripts.Script",
|
||||||
related_name="alert_template",
|
related_name="alert_template",
|
||||||
@@ -842,31 +620,13 @@ class AlertTemplate(BaseAuditModel):
|
|||||||
null=True,
|
null=True,
|
||||||
on_delete=models.SET_NULL,
|
on_delete=models.SET_NULL,
|
||||||
)
|
)
|
||||||
action_rest = models.ForeignKey(
|
|
||||||
"core.URLAction",
|
|
||||||
related_name="url_action_alert_template",
|
|
||||||
blank=True,
|
|
||||||
null=True,
|
|
||||||
on_delete=models.SET_NULL,
|
|
||||||
)
|
|
||||||
action_args = ArrayField(
|
action_args = ArrayField(
|
||||||
models.CharField(max_length=255, null=True, blank=True),
|
models.CharField(max_length=255, null=True, blank=True),
|
||||||
null=True,
|
null=True,
|
||||||
blank=True,
|
blank=True,
|
||||||
default=list,
|
default=list,
|
||||||
)
|
)
|
||||||
action_env_vars = ArrayField(
|
|
||||||
models.TextField(null=True, blank=True),
|
|
||||||
null=True,
|
|
||||||
blank=True,
|
|
||||||
default=list,
|
|
||||||
)
|
|
||||||
action_timeout = models.PositiveIntegerField(default=15)
|
action_timeout = models.PositiveIntegerField(default=15)
|
||||||
resolved_action_type = models.CharField(
|
|
||||||
max_length=10,
|
|
||||||
choices=AlertTemplateActionType.choices,
|
|
||||||
default=AlertTemplateActionType.SCRIPT,
|
|
||||||
)
|
|
||||||
resolved_action = models.ForeignKey(
|
resolved_action = models.ForeignKey(
|
||||||
"scripts.Script",
|
"scripts.Script",
|
||||||
related_name="resolved_alert_template",
|
related_name="resolved_alert_template",
|
||||||
@@ -874,25 +634,12 @@ class AlertTemplate(BaseAuditModel):
|
|||||||
null=True,
|
null=True,
|
||||||
on_delete=models.SET_NULL,
|
on_delete=models.SET_NULL,
|
||||||
)
|
)
|
||||||
resolved_action_rest = models.ForeignKey(
|
|
||||||
"core.URLAction",
|
|
||||||
related_name="resolved_url_action_alert_template",
|
|
||||||
blank=True,
|
|
||||||
null=True,
|
|
||||||
on_delete=models.SET_NULL,
|
|
||||||
)
|
|
||||||
resolved_action_args = ArrayField(
|
resolved_action_args = ArrayField(
|
||||||
models.CharField(max_length=255, null=True, blank=True),
|
models.CharField(max_length=255, null=True, blank=True),
|
||||||
null=True,
|
null=True,
|
||||||
blank=True,
|
blank=True,
|
||||||
default=list,
|
default=list,
|
||||||
)
|
)
|
||||||
resolved_action_env_vars = ArrayField(
|
|
||||||
models.TextField(null=True, blank=True),
|
|
||||||
null=True,
|
|
||||||
blank=True,
|
|
||||||
default=list,
|
|
||||||
)
|
|
||||||
resolved_action_timeout = models.PositiveIntegerField(default=15)
|
resolved_action_timeout = models.PositiveIntegerField(default=15)
|
||||||
|
|
||||||
# overrides the global recipients
|
# overrides the global recipients
|
||||||
@@ -919,22 +666,21 @@ class AlertTemplate(BaseAuditModel):
|
|||||||
agent_always_text = BooleanField(null=True, blank=True, default=None)
|
agent_always_text = BooleanField(null=True, blank=True, default=None)
|
||||||
agent_always_alert = BooleanField(null=True, blank=True, default=None)
|
agent_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||||
agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||||
# fmt: off
|
agent_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||||
agent_script_actions = BooleanField(null=True, blank=True, default=True) # should be renamed because also deals with webhooks
|
|
||||||
|
|
||||||
# check alert settings
|
# check alert settings
|
||||||
check_email_alert_severity = ArrayField(
|
check_email_alert_severity = ArrayField(
|
||||||
-        models.CharField(max_length=25, blank=True, choices=AlertSeverity.choices),
+        models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
        blank=True,
        default=list,
    )
    check_text_alert_severity = ArrayField(
-        models.CharField(max_length=25, blank=True, choices=AlertSeverity.choices),
+        models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
        blank=True,
        default=list,
    )
    check_dashboard_alert_severity = ArrayField(
-        models.CharField(max_length=25, blank=True, choices=AlertSeverity.choices),
+        models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
        blank=True,
        default=list,
    )

@@ -944,22 +690,21 @@ class AlertTemplate(BaseAuditModel):
    check_always_text = BooleanField(null=True, blank=True, default=None)
    check_always_alert = BooleanField(null=True, blank=True, default=None)
    check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
-    # fmt: off
-    check_script_actions = BooleanField(null=True, blank=True, default=True) # should be renamed because also deals with webhooks
+    check_script_actions = BooleanField(null=True, blank=True, default=True)

    # task alert settings
    task_email_alert_severity = ArrayField(
-        models.CharField(max_length=25, blank=True, choices=AlertSeverity.choices),
+        models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
        blank=True,
        default=list,
    )
    task_text_alert_severity = ArrayField(
-        models.CharField(max_length=25, blank=True, choices=AlertSeverity.choices),
+        models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
        blank=True,
        default=list,
    )
    task_dashboard_alert_severity = ArrayField(
-        models.CharField(max_length=25, blank=True, choices=AlertSeverity.choices),
+        models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
        blank=True,
        default=list,
    )

@@ -969,8 +714,7 @@ class AlertTemplate(BaseAuditModel):
    task_always_text = BooleanField(null=True, blank=True, default=None)
    task_always_alert = BooleanField(null=True, blank=True, default=None)
    task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
-    # fmt: off
-    task_script_actions = BooleanField(null=True, blank=True, default=True) # should be renamed because also deals with webhooks
+    task_script_actions = BooleanField(null=True, blank=True, default=True)

    # exclusion settings
    exclude_workstations = BooleanField(null=True, blank=True, default=False)

@@ -994,9 +738,9 @@ class AlertTemplate(BaseAuditModel):
            agent in self.excluded_agents.all()
            or agent.site in self.excluded_sites.all()
            or agent.client in self.excluded_clients.all()
-            or agent.monitoring_type == AgentMonType.WORKSTATION
+            or agent.monitoring_type == "workstation"
            and self.exclude_workstations
-            or agent.monitoring_type == AgentMonType.SERVER
+            or agent.monitoring_type == "server"
            and self.exclude_servers
        )
@@ -1,9 +1,6 @@
-from typing import TYPE_CHECKING
-
from django.shortcuts import get_object_or_404
from rest_framework import permissions
-
-from tacticalrmm.constants import AlertTemplateActionType
+from typing import TYPE_CHECKING
from tacticalrmm.permissions import _has_perm, _has_perm_on_agent

if TYPE_CHECKING:

@@ -33,7 +30,7 @@ def _has_perm_on_alert(user: "User", id: int) -> bool:

class AlertPerms(permissions.BasePermission):
    def has_permission(self, r, view) -> bool:
-        if r.method in ("GET", "PATCH"):
+        if r.method == "GET" or r.method == "PATCH":
            if "pk" in view.kwargs.keys():
                return _has_perm(r, "can_list_alerts") and _has_perm_on_alert(
                    r.user, view.kwargs["pk"]

@@ -53,18 +50,5 @@ class AlertTemplatePerms(permissions.BasePermission):
    def has_permission(self, r, view) -> bool:
        if r.method == "GET":
            return _has_perm(r, "can_list_alerttemplates")
-
-        if r.method in ("POST", "PUT", "PATCH"):
-            # ensure only users with explicit run server script perms can add/modify alert templates
-            # while also still requiring the manage alert template perm
-            if isinstance(r.data, dict):
-                if (
-                    r.data.get("action_type") == AlertTemplateActionType.SERVER
-                    or r.data.get("resolved_action_type")
-                    == AlertTemplateActionType.SERVER
-                ):
-                    return _has_perm(r, "can_run_server_scripts") and _has_perm(
-                        r, "can_manage_alerttemplates"
-                    )
-
+        else:
            return _has_perm(r, "can_manage_alerttemplates")
@@ -1,14 +1,13 @@
-from rest_framework.fields import SerializerMethodField
-from rest_framework.serializers import ModelSerializer, ReadOnlyField
-
from automation.serializers import PolicySerializer
from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer
-from tacticalrmm.constants import AlertTemplateActionType
+from rest_framework.fields import SerializerMethodField
+from rest_framework.serializers import ModelSerializer, ReadOnlyField

from .models import Alert, AlertTemplate


class AlertSerializer(ModelSerializer):

    hostname = ReadOnlyField(source="assigned_agent.hostname")
    agent_id = ReadOnlyField(source="assigned_agent.agent_id")
    client = ReadOnlyField(source="client.name")

@@ -26,29 +25,14 @@ class AlertTemplateSerializer(ModelSerializer):
    task_settings = ReadOnlyField(source="has_task_settings")
    core_settings = ReadOnlyField(source="has_core_settings")
    default_template = ReadOnlyField(source="is_default_template")
-    action_name = SerializerMethodField()
-    resolved_action_name = SerializerMethodField()
+    action_name = ReadOnlyField(source="action.name")
+    resolved_action_name = ReadOnlyField(source="resolved_action.name")
    applied_count = SerializerMethodField()

    class Meta:
        model = AlertTemplate
        fields = "__all__"

-    def get_action_name(self, obj):
-        if obj.action_type == AlertTemplateActionType.REST and obj.action_rest:
-            return obj.action_rest.name
-
-        return obj.action.name if obj.action else ""
-
-    def get_resolved_action_name(self, obj):
-        if (
-            obj.resolved_action_type == AlertTemplateActionType.REST
-            and obj.resolved_action_rest
-        ):
-            return obj.resolved_action_rest.name
-
-        return obj.resolved_action.name if obj.resolved_action else ""
-
    def get_applied_count(self, instance):
        return (
            instance.policies.count()
@@ -1,10 +1,8 @@
from django.utils import timezone as djangotime
-
+from .models import Alert
from agents.models import Agent
from tacticalrmm.celery import app

-from .models import Alert
-

@app.task
def unsnooze_alerts() -> str:
@@ -1,23 +1,17 @@
-from datetime import timedelta
+from datetime import datetime, timedelta
from itertools import cycle
from unittest.mock import patch

from alerts.tasks import cache_agents_alert_template
-from autotasks.models import TaskResult
-from core.tasks import cache_db_fields_task, resolve_alerts_task
from core.utils import get_core_settings
+from core.tasks import cache_db_fields_task, handle_resolved_stuff
from django.conf import settings
from django.utils import timezone as djangotime
from model_bakery import baker, seq
-from tacticalrmm.constants import (
-    AgentMonType,
-    AlertSeverity,
-    AlertType,
-    CheckStatus,
-    URLActionType,
-)
from tacticalrmm.test import TacticalTestCase

+from autotasks.models import TaskResult
from .models import Alert, AlertTemplate
from .serializers import (
    AlertSerializer,

@@ -33,7 +27,6 @@ class TestAlertsViews(TacticalTestCase):
        self.authenticate()
        self.setup_coresettings()

-    """
    def test_get_alerts(self):
        url = "/alerts/"

@@ -45,15 +38,15 @@ class TestAlertsViews(TacticalTestCase):
        alerts = baker.make(
            "alerts.Alert",
            agent=agent,
-            alert_time=seq(djangotime.now(), timedelta(days=15)),
-            severity=AlertSeverity.WARNING,
+            alert_time=seq(datetime.now(), timedelta(days=15)),
+            severity="warning",
            _quantity=3,
        )
        baker.make(
            "alerts.Alert",
            assigned_check=check,
-            alert_time=seq(djangotime.now(), timedelta(days=15)),
-            severity=AlertSeverity.ERROR,
+            alert_time=seq(datetime.now(), timedelta(days=15)),
+            severity="error",
            _quantity=7,
        )
        baker.make(

@@ -61,7 +54,7 @@ class TestAlertsViews(TacticalTestCase):
            assigned_task=task,
            snoozed=True,
            snooze_until=djangotime.now(),
-            alert_time=seq(djangotime.now(), timedelta(days=15)),
+            alert_time=seq(datetime.now(), timedelta(days=15)),
            _quantity=2,
        )
        baker.make(

@@ -69,7 +62,7 @@ class TestAlertsViews(TacticalTestCase):
            agent=agent,
            resolved=True,
            resolved_on=djangotime.now(),
-            alert_time=seq(djangotime.now(), timedelta(days=15)),
+            alert_time=seq(datetime.now(), timedelta(days=15)),
            _quantity=9,
        )

@@ -77,8 +70,8 @@ class TestAlertsViews(TacticalTestCase):
        data = {"top": 3}
        resp = self.client.patch(url, data, format="json")
        self.assertEqual(resp.status_code, 200)
-        self.assertEqual(resp.data["alerts"], AlertSerializer(alerts, many=True).data)
-        self.assertEqual(resp.data["alerts_count"], 10)
+        self.assertEquals(resp.data["alerts"], AlertSerializer(alerts, many=True).data)
+        self.assertEquals(resp.data["alerts_count"], 10)

        # test filter data
        # test data and result counts

@@ -126,14 +119,13 @@ class TestAlertsViews(TacticalTestCase):
            self.assertEqual(len(resp.data), req["count"])

        self.check_not_authenticated("patch", url)
-    """

    def test_add_alert(self):
        url = "/alerts/"

        agent = baker.make_recipe("agents.agent")
        data = {
-            "alert_time": djangotime.now(),
+            "alert_time": datetime.now(),
            "agent": agent.id,
            "severity": "warning",
            "alert_type": "availability",
@@ -282,32 +274,12 @@ class TestAlertsViews(TacticalTestCase):
        resp = self.client.get("/alerts/templates/500/", format="json")
        self.assertEqual(resp.status_code, 404)

-        agent_script = baker.make("scripts.Script")
-        server_script = baker.make("scripts.Script")
-        webhook = baker.make("core.URLAction", action_type=URLActionType.REST)
-
-        alert_template_agent_script = baker.make(
-            "alerts.AlertTemplate", action=agent_script
-        )
-        url = f"/alerts/templates/{alert_template_agent_script.pk}/"
+        alert_template = baker.make("alerts.AlertTemplate")
+        url = f"/alerts/templates/{alert_template.pk}/"

        resp = self.client.get(url, format="json")
-        serializer = AlertTemplateSerializer(alert_template_agent_script)
-        self.assertEqual(resp.status_code, 200)
-        self.assertEqual(resp.data, serializer.data)
-
-        alert_template_server_script = baker.make(
-            "alerts.AlertTemplate", action=server_script
-        )
-        url = f"/alerts/templates/{alert_template_server_script.pk}/"
-        resp = self.client.get(url, format="json")
-        serializer = AlertTemplateSerializer(alert_template_server_script)
-        self.assertEqual(resp.status_code, 200)
-        self.assertEqual(resp.data, serializer.data)
-
-        alert_template_webhook = baker.make("alerts.AlertTemplate", action_rest=webhook)
-        url = f"/alerts/templates/{alert_template_webhook.pk}/"
-        resp = self.client.get(url, format="json")
-        serializer = AlertTemplateSerializer(alert_template_webhook)
+        serializer = AlertTemplateSerializer(alert_template)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.data, serializer.data)

@@ -390,7 +362,7 @@ class TestAlertTasks(TacticalTestCase):
        not_snoozed = baker.make(
            "alerts.Alert",
            snoozed=True,
-            snooze_until=seq(djangotime.now(), timedelta(days=15)),
+            snooze_until=seq(datetime.now(), timedelta(days=15)),
            _quantity=5,
        )

@@ -398,7 +370,7 @@ class TestAlertTasks(TacticalTestCase):
        snoozed = baker.make(
            "alerts.Alert",
            snoozed=True,
-            snooze_until=seq(djangotime.now(), timedelta(days=-15)),
+            snooze_until=seq(datetime.now(), timedelta(days=-15)),
            _quantity=5,
        )

@@ -416,12 +388,11 @@ class TestAlertTasks(TacticalTestCase):
        )

    def test_agent_gets_correct_alert_template(self):

        core = get_core_settings()
        # setup data
-        workstation = baker.make_recipe(
-            "agents.agent", monitoring_type=AgentMonType.WORKSTATION
-        )
-        server = baker.make_recipe("agents.agent", monitoring_type=AgentMonType.SERVER)
+        workstation = baker.make_recipe("agents.agent", monitoring_type="workstation")
+        server = baker.make_recipe("agents.agent", monitoring_type="server")

        policy = baker.make("automation.Policy", active=True)

@@ -438,15 +409,15 @@ class TestAlertTasks(TacticalTestCase):
        core.server_policy = policy
        core.save()

-        self.assertEqual(server.set_alert_template().pk, alert_templates[0].pk)
-        self.assertEqual(workstation.set_alert_template().pk, alert_templates[0].pk)
+        self.assertEquals(server.set_alert_template().pk, alert_templates[0].pk)
+        self.assertEquals(workstation.set_alert_template().pk, alert_templates[0].pk)

        # assign second Alert Template to as default alert template
        core.alert_template = alert_templates[1]
        core.save()

-        self.assertEqual(workstation.set_alert_template().pk, alert_templates[1].pk)
-        self.assertEqual(server.set_alert_template().pk, alert_templates[1].pk)
+        self.assertEquals(workstation.set_alert_template().pk, alert_templates[1].pk)
+        self.assertEquals(server.set_alert_template().pk, alert_templates[1].pk)

        # assign third Alert Template to client
        workstation.client.alert_template = alert_templates[2]

@@ -454,8 +425,8 @@ class TestAlertTasks(TacticalTestCase):
        workstation.client.save()
        server.client.save()

-        self.assertEqual(workstation.set_alert_template().pk, alert_templates[2].pk)
-        self.assertEqual(server.set_alert_template().pk, alert_templates[2].pk)
+        self.assertEquals(workstation.set_alert_template().pk, alert_templates[2].pk)
+        self.assertEquals(server.set_alert_template().pk, alert_templates[2].pk)

        # apply policy to client and should override
        workstation.client.workstation_policy = policy

@@ -463,8 +434,8 @@ class TestAlertTasks(TacticalTestCase):
        workstation.client.save()
        server.client.save()

-        self.assertEqual(workstation.set_alert_template().pk, alert_templates[0].pk)
-        self.assertEqual(server.set_alert_template().pk, alert_templates[0].pk)
+        self.assertEquals(workstation.set_alert_template().pk, alert_templates[0].pk)
+        self.assertEquals(server.set_alert_template().pk, alert_templates[0].pk)

        # assign fouth Alert Template to site
        workstation.site.alert_template = alert_templates[3]

@@ -472,8 +443,8 @@ class TestAlertTasks(TacticalTestCase):
        workstation.site.save()
        server.site.save()

-        self.assertEqual(workstation.set_alert_template().pk, alert_templates[3].pk)
-        self.assertEqual(server.set_alert_template().pk, alert_templates[3].pk)
+        self.assertEquals(workstation.set_alert_template().pk, alert_templates[3].pk)
+        self.assertEquals(server.set_alert_template().pk, alert_templates[3].pk)

        # apply policy to site
        workstation.site.workstation_policy = policy

@@ -481,8 +452,8 @@ class TestAlertTasks(TacticalTestCase):
        workstation.site.save()
        server.site.save()

-        self.assertEqual(workstation.set_alert_template().pk, alert_templates[0].pk)
-        self.assertEqual(server.set_alert_template().pk, alert_templates[0].pk)
+        self.assertEquals(workstation.set_alert_template().pk, alert_templates[0].pk)
+        self.assertEquals(server.set_alert_template().pk, alert_templates[0].pk)

        # apply policy to agents
        workstation.policy = policy
@@ -490,35 +461,35 @@ class TestAlertTasks(TacticalTestCase):
        workstation.save()
        server.save()

-        self.assertEqual(workstation.set_alert_template().pk, alert_templates[0].pk)
-        self.assertEqual(server.set_alert_template().pk, alert_templates[0].pk)
+        self.assertEquals(workstation.set_alert_template().pk, alert_templates[0].pk)
+        self.assertEquals(server.set_alert_template().pk, alert_templates[0].pk)

        # test disabling alert template
        alert_templates[0].is_active = False
        alert_templates[0].save()

-        self.assertEqual(workstation.set_alert_template().pk, alert_templates[3].pk)
-        self.assertEqual(server.set_alert_template().pk, alert_templates[3].pk)
+        self.assertEquals(workstation.set_alert_template().pk, alert_templates[3].pk)
+        self.assertEquals(server.set_alert_template().pk, alert_templates[3].pk)

        # test policy exclusions
        alert_templates[3].excluded_agents.set([workstation.pk])

-        self.assertEqual(workstation.set_alert_template().pk, alert_templates[2].pk)
-        self.assertEqual(server.set_alert_template().pk, alert_templates[3].pk)
+        self.assertEquals(workstation.set_alert_template().pk, alert_templates[2].pk)
+        self.assertEquals(server.set_alert_template().pk, alert_templates[3].pk)

        # test workstation exclusions
        alert_templates[2].exclude_workstations = True
        alert_templates[2].save()

-        self.assertEqual(workstation.set_alert_template().pk, alert_templates[1].pk)
-        self.assertEqual(server.set_alert_template().pk, alert_templates[3].pk)
+        self.assertEquals(workstation.set_alert_template().pk, alert_templates[1].pk)
+        self.assertEquals(server.set_alert_template().pk, alert_templates[3].pk)

        # test server exclusions
        alert_templates[3].exclude_servers = True
        alert_templates[3].save()

-        self.assertEqual(workstation.set_alert_template().pk, alert_templates[1].pk)
-        self.assertEqual(server.set_alert_template().pk, alert_templates[2].pk)
+        self.assertEquals(workstation.set_alert_template().pk, alert_templates[1].pk)
+        self.assertEquals(server.set_alert_template().pk, alert_templates[2].pk)

    @patch("agents.tasks.sleep")
    @patch("core.models.CoreSettings.send_mail")

@@ -552,7 +523,7 @@ class TestAlertTasks(TacticalTestCase):
        # call outages task and no alert should be created
        agent_outages_task()

-        self.assertEqual(Alert.objects.count(), 0)
+        self.assertEquals(Alert.objects.count(), 0)

        # set overdue_dashboard_alert and alert should be created
        agent_dashboard_alert.overdue_dashboard_alert = True

@@ -603,22 +574,22 @@ class TestAlertTasks(TacticalTestCase):
        agent_outages_task()

        # should have created 6 alerts
-        self.assertEqual(Alert.objects.count(), 6)
+        self.assertEquals(Alert.objects.count(), 6)

        # other specific agents should have created alerts
-        self.assertEqual(Alert.objects.filter(agent=agent_dashboard_alert).count(), 1)
-        self.assertEqual(Alert.objects.filter(agent=agent_text_alert).count(), 1)
-        self.assertEqual(Alert.objects.filter(agent=agent_email_alert).count(), 1)
-        self.assertEqual(Alert.objects.filter(agent=agent_template_email).count(), 1)
-        self.assertEqual(
+        self.assertEquals(Alert.objects.filter(agent=agent_dashboard_alert).count(), 1)
+        self.assertEquals(Alert.objects.filter(agent=agent_text_alert).count(), 1)
+        self.assertEquals(Alert.objects.filter(agent=agent_email_alert).count(), 1)
+        self.assertEquals(Alert.objects.filter(agent=agent_template_email).count(), 1)
+        self.assertEquals(
            Alert.objects.filter(agent=agent_template_dashboard).count(), 1
        )
-        self.assertEqual(Alert.objects.filter(agent=agent_template_text).count(), 1)
-        self.assertEqual(Alert.objects.filter(agent=agent_template_blank).count(), 0)
+        self.assertEquals(Alert.objects.filter(agent=agent_template_text).count(), 1)
+        self.assertEquals(Alert.objects.filter(agent=agent_template_blank).count(), 0)

        # check if email and text tasks were called
-        self.assertEqual(outage_email.call_count, 2)
-        self.assertEqual(outage_sms.call_count, 2)
+        self.assertEquals(outage_email.call_count, 2)
+        self.assertEquals(outage_sms.call_count, 2)

        outage_sms.assert_any_call(
            pk=Alert.objects.get(agent=agent_text_alert).pk, alert_interval=None

@@ -659,7 +630,7 @@ class TestAlertTasks(TacticalTestCase):

        # calling agent outage task again shouldn't create duplicate alerts and won't send alerts
        agent_outages_task()
-        self.assertEqual(Alert.objects.count(), 6)
+        self.assertEquals(Alert.objects.count(), 6)

        # test periodic notification
        # change email/text sent to sometime in the past

@@ -703,6 +674,8 @@ class TestAlertTasks(TacticalTestCase):
        agent_template_email = Agent.objects.get(pk=agent_template_email.pk)

        # have the two agents checkin
+        url = "/api/v3/checkin/"
+
        agent_template_text.version = settings.LATEST_AGENT_VER
        agent_template_text.last_seen = djangotime.now()
        agent_template_text.save()

@@ -712,7 +685,7 @@ class TestAlertTasks(TacticalTestCase):
        agent_template_email.save()

        cache_db_fields_task()
-        resolve_alerts_task()
+        handle_resolved_stuff()

        recovery_sms.assert_called_with(
            pk=Alert.objects.get(agent=agent_template_text).pk

@@ -780,7 +753,7 @@ class TestAlertTasks(TacticalTestCase):
            "alerts.AlertTemplate",
            is_active=True,
            check_always_email=True,
-            check_email_alert_severity=[AlertSeverity.WARNING],
+            check_email_alert_severity=["warning"],
        )
        agent_template_email.client.alert_template = alert_template_email
        agent_template_email.client.save()

@@ -791,12 +764,8 @@ class TestAlertTasks(TacticalTestCase):
            is_active=True,
            check_always_alert=True,
            check_always_text=True,
-            check_dashboard_alert_severity=[
-                AlertSeverity.INFO,
-                AlertSeverity.WARNING,
-                AlertSeverity.ERROR,
-            ],
-            check_text_alert_severity=[AlertSeverity.ERROR],
+            check_dashboard_alert_severity=["info", "warning", "error"],
+            check_text_alert_severity=["error"],
        )
        agent_template_dashboard_text.client.alert_template = (
            alert_template_dashboard_text

@@ -820,7 +789,7 @@ class TestAlertTasks(TacticalTestCase):
            "checks.CheckResult",
            assigned_check=check_agent,
            agent=agent,
-            alert_severity=AlertSeverity.WARNING,
+            alert_severity="warning",
        )
        check_template_email = baker.make_recipe(
            "checks.cpuload_check", agent=agent_template_email

@@ -871,8 +840,8 @@ class TestAlertTasks(TacticalTestCase):
        )

        # test agent with check that has alert settings
-        check_agent_result.alert_severity = AlertSeverity.WARNING
-        check_agent_result.status = CheckStatus.FAILING
+        check_agent_result.alert_severity = "warning"
+        check_agent_result.status = "failing"

        Alert.handle_alert_failure(check_agent_result)

@@ -933,7 +902,7 @@ class TestAlertTasks(TacticalTestCase):
        outage_sms.assert_not_called

        # update check alert severity to error
-        check_template_dashboard_text_result.alert_severity = AlertSeverity.ERROR
+        check_template_dashboard_text_result.alert_severity = "error"
        check_template_dashboard_text_result.save()

        # now should trigger alert

@@ -973,7 +942,7 @@ class TestAlertTasks(TacticalTestCase):
        send_email.assert_not_called()
        send_sms.assert_not_called()

-        self.assertEqual(
+        self.assertEquals(
            Alert.objects.filter(assigned_check=check_template_email).count(), 1
        )

@@ -1092,7 +1061,7 @@ class TestAlertTasks(TacticalTestCase):
            "alerts.AlertTemplate",
            is_active=True,
            task_always_email=True,
-            task_email_alert_severity=[AlertSeverity.WARNING],
+            task_email_alert_severity=["warning"],
        )
        agent_template_email.client.alert_template = alert_template_email
        agent_template_email.client.save()

@@ -1103,12 +1072,8 @@ class TestAlertTasks(TacticalTestCase):
            is_active=True,
            task_always_alert=True,
            task_always_text=True,
-            task_dashboard_alert_severity=[
-                AlertSeverity.INFO,
-                AlertSeverity.WARNING,
-                AlertSeverity.ERROR,
-            ],
-            task_text_alert_severity=[AlertSeverity.ERROR],
+            task_dashboard_alert_severity=["info", "warning", "error"],
+            task_text_alert_severity=["error"],
        )
        agent_template_dashboard_text.client.alert_template = (
            alert_template_dashboard_text

@@ -1127,7 +1092,7 @@ class TestAlertTasks(TacticalTestCase):
            email_alert=True,
            text_alert=True,
            dashboard_alert=True,
-            alert_severity=AlertSeverity.WARNING,
+            alert_severity="warning",
        )
        task_agent_result = baker.make(
            "autotasks.TaskResult", agent=agent, task=task_agent

@@ -1135,7 +1100,7 @@ class TestAlertTasks(TacticalTestCase):
        task_template_email = baker.make(
            "autotasks.AutomatedTask",
            agent=agent_template_email,
-            alert_severity=AlertSeverity.WARNING,
+            alert_severity="warning",
        )
        task_template_email_result = baker.make(
            "autotasks.TaskResult", agent=agent_template_email, task=task_template_email

@@ -1143,7 +1108,7 @@ class TestAlertTasks(TacticalTestCase):
        task_template_dashboard_text = baker.make(
            "autotasks.AutomatedTask",
            agent=agent_template_dashboard_text,
-            alert_severity=AlertSeverity.INFO,
+            alert_severity="info",
        )
        task_template_dashboard_text_result = baker.make(
            "autotasks.TaskResult",

@@ -1153,15 +1118,13 @@ class TestAlertTasks(TacticalTestCase):
        task_template_blank = baker.make(
            "autotasks.AutomatedTask",
            agent=agent_template_blank,
-            alert_severity=AlertSeverity.ERROR,
+            alert_severity="error",
        )
        task_template_blank_result = baker.make(
            "autotasks.TaskResult", agent=agent_template_blank, task=task_template_blank
        )
        task_no_settings = baker.make(
-            "autotasks.AutomatedTask",
-            agent=agent_no_settings,
-            alert_severity=AlertSeverity.WARNING,
+            "autotasks.AutomatedTask", agent=agent_no_settings, alert_severity="warning"
        )
        task_no_settings_result = baker.make(
            "autotasks.TaskResult", agent=agent_no_settings, task=task_no_settings

@@ -1239,7 +1202,7 @@ class TestAlertTasks(TacticalTestCase):
        outage_sms.assert_not_called

        # update task alert seveity to error
-        task_template_dashboard_text.alert_severity = AlertSeverity.ERROR
+        task_template_dashboard_text.alert_severity = "error"
        task_template_dashboard_text.save()

        # now should trigger alert

@@ -1281,7 +1244,7 @@ class TestAlertTasks(TacticalTestCase):
        send_email.assert_not_called()
        send_sms.assert_not_called()

-        self.assertEqual(
+        self.assertEquals(
            Alert.objects.filter(assigned_task=task_template_email).count(), 1
        )

@@ -1397,7 +1360,7 @@ class TestAlertTasks(TacticalTestCase):
    def test_alert_actions(
        self, recovery_sms, recovery_email, outage_email, outage_sms, nats_cmd
    ):
-        from agents.models import AgentHistory
        from agents.tasks import agent_outages_task

        # Setup cmd mock

@@ -1423,12 +1386,9 @@ class TestAlertTasks(TacticalTestCase):
            agent_script_actions=False,
            action=failure_action,
            action_timeout=30,
-            action_args=["hello", "world"],
-            action_env_vars=["hello=world", "foo=bar"],
            resolved_action=resolved_action,
            resolved_action_timeout=35,
            resolved_action_args=["nice_arg"],
-            resolved_action_env_vars=["resolved=action", "env=vars"],
        )
        agent.client.alert_template = alert_template
        agent.client.save()

@@ -1449,13 +1409,8 @@ class TestAlertTasks(TacticalTestCase):
        data = {
            "func": "runscriptfull",
            "timeout": 30,
-            "script_args": ["hello", "world"],
+            "script_args": [],
            "payload": {"code": failure_action.code, "shell": failure_action.shell},
-            "run_as_user": False,
-            "env_vars": ["hello=world", "foo=bar"],
-            "id": AgentHistory.objects.last().pk, # type: ignore
-            "nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
-            "deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
        }

        nats_cmd.assert_called_with(data, timeout=30, wait=True)

@@ -1476,7 +1431,7 @@ class TestAlertTasks(TacticalTestCase):
        agent.save()

        cache_db_fields_task()
-        resolve_alerts_task()
+        handle_resolved_stuff()

        # this is what data should be
        data = {

@@ -1484,11 +1439,6 @@ class TestAlertTasks(TacticalTestCase):
            "timeout": 35,
            "script_args": ["nice_arg"],
            "payload": {"code": resolved_action.code, "shell": resolved_action.shell},
-            "run_as_user": False,
-            "env_vars": ["resolved=action", "env=vars"],
-            "id": AgentHistory.objects.last().pk, # type: ignore
-            "nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
-            "deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
        }

        nats_cmd.assert_called_with(data, timeout=35, wait=True)
@@ -1559,25 +1509,22 @@ class TestAlertPermissions(TacticalTestCase):
        tasks = baker.make("autotasks.AutomatedTask", agent=cycle(agents), _quantity=3)
        baker.make(
            "alerts.Alert",
-            alert_type=AlertType.TASK,
+            alert_type="task",
            agent=cycle(agents),
            assigned_task=cycle(tasks),
            _quantity=3,
        )
        baker.make(
            "alerts.Alert",
-            alert_type=AlertType.CHECK,
+            alert_type="check",
            agent=cycle(agents),
            assigned_check=cycle(checks),
            _quantity=3,
        )
        baker.make(
-            "alerts.Alert",
-            alert_type=AlertType.AVAILABILITY,
-            agent=cycle(agents),
-            _quantity=3,
+            "alerts.Alert", alert_type="availability", agent=cycle(agents), _quantity=3
        )
-        baker.make("alerts.Alert", alert_type=AlertType.CUSTOM, _quantity=4)
+        baker.make("alerts.Alert", alert_type="custom", _quantity=4)

        # test super user access
        r = self.check_authorized_superuser("patch", f"{base_url}/")

@@ -1621,27 +1568,22 @@ class TestAlertPermissions(TacticalTestCase):
        tasks = baker.make("autotasks.AutomatedTask", agent=cycle(agents), _quantity=3)
        alert_tasks = baker.make(
            "alerts.Alert",
-            alert_type=AlertType.TASK,
+            alert_type="task",
            agent=cycle(agents),
            assigned_task=cycle(tasks),
            _quantity=3,
        )
        alert_checks = baker.make(
            "alerts.Alert",
-            alert_type=AlertType.CHECK,
+            alert_type="check",
            agent=cycle(agents),
            assigned_check=cycle(checks),
            _quantity=3,
        )
        alert_agents = baker.make(
-            "alerts.Alert",
-            alert_type=AlertType.AVAILABILITY,
-            agent=cycle(agents),
-            _quantity=3,
-        )
-        alert_custom = baker.make(
-            "alerts.Alert", alert_type=AlertType.CUSTOM, _quantity=4
+            "alerts.Alert", alert_type="availability", agent=cycle(agents), _quantity=3
        )
+        alert_custom = baker.make("alerts.Alert", alert_type="custom", _quantity=4)

        # alert task url
        task_url = f"{base_url}/{alert_tasks[0].id}/" # for agent

@@ -1662,7 +1604,8 @@ class TestAlertPermissions(TacticalTestCase):
            unauthorized_task_url,
        ]

-        for method in ("get", "put", "delete"):
+        for method in ["get", "put", "delete"]:
+
            # test superuser access
            for url in authorized_urls:
                self.check_authorized_superuser(method, url)

@@ -1717,7 +1660,7 @@ class TestAlertPermissions(TacticalTestCase):
        agent = baker.make_recipe("agents.agent")
        alerts = baker.make(
            "alerts.Alert",
-            alert_type=AlertType.AVAILABILITY,
+            alert_type="availability",
            agent=agent,
            resolved=False,
            _quantity=3,

@@ -1731,7 +1674,7 @@ class TestAlertPermissions(TacticalTestCase):
        # make sure only 1 alert is not resolved
        self.assertEqual(
            Alert.objects.filter(
-                alert_type=AlertType.AVAILABILITY, agent=agent, resolved=False
+                alert_type="availability", agent=agent, resolved=False
            ).count(),
            1,
        )

@@ -1741,7 +1684,7 @@ class TestAlertPermissions(TacticalTestCase):
        check = baker.make_recipe("checks.diskspace_check", agent=agent)
        alerts = baker.make(
            "alerts.Alert",
-            alert_type=AlertType.CHECK,
+            alert_type="check",
            assigned_check=check,
            agent=agent,
            resolved=False,

@@ -1756,7 +1699,7 @@ class TestAlertPermissions(TacticalTestCase):
        # make sure only 1 alert is not resolved
        self.assertEqual(
            Alert.objects.filter(
-                alert_type=AlertType.CHECK, agent=agent, resolved=False
+                alert_type="check", agent=agent, resolved=False
            ).count(),
            1,
        )

@@ -1766,7 +1709,7 @@ class TestAlertPermissions(TacticalTestCase):
        task = baker.make("autotasks.AutomatedTask", agent=agent)
        alerts = baker.make(
            "alerts.Alert",
-            alert_type=AlertType.TASK,
+            alert_type="task",
            assigned_task=task,
            agent=agent,
            resolved=False,

@@ -1781,7 +1724,7 @@ class TestAlertPermissions(TacticalTestCase):
        # make sure only 1 alert is not resolved
        self.assertEqual(
            Alert.objects.filter(
-                alert_type=AlertType.TASK, agent=agent, resolved=False
+                alert_type="task", agent=agent, resolved=False
            ).count(),
            1,
        )
@@ -7,7 +7,7 @@ from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

-from tacticalrmm.helpers import notify_error
+from tacticalrmm.utils import notify_error

from .models import Alert, AlertTemplate
from .permissions import AlertPerms, AlertTemplatePerms

@@ -23,18 +23,15 @@ class GetAddAlerts(APIView):
    permission_classes = [IsAuthenticated, AlertPerms]

    def patch(self, request):
+
        # top 10 alerts for dashboard icon
        if "top" in request.data.keys():
-            alerts = (
-                Alert.objects.filter_by_role(request.user) # type: ignore
-                .filter(resolved=False, snoozed=False, hidden=False)
-                .order_by("alert_time")[: int(request.data["top"])]
-            )
-            count = (
-                Alert.objects.filter_by_role(request.user) # type: ignore
-                .filter(resolved=False, snoozed=False, hidden=False)
-                .count()
-            )
+            alerts = Alert.objects.filter(
+                resolved=False, snoozed=False, hidden=False
+            ).order_by("alert_time")[: int(request.data["top"])]
+            count = Alert.objects.filter(
+                resolved=False, snoozed=False, hidden=False
+            ).count()
            return Response(
                {
                    "alerts_count": count,

@@ -44,13 +41,13 @@ class GetAddAlerts(APIView):

        elif any(
            key
-            in (
+            in [
                "timeFilter",
                "clientFilter",
                "severityFilter",
                "resolvedFilter",
                "snoozedFilter",
-            )
+            ]
            for key in request.data.keys()
        ):
            clientFilter = Q()
@@ -1,8 +1,11 @@
+import json
+import os
+
+from autotasks.models import TaskResult
+from django.conf import settings
from django.utils import timezone as djangotime
from model_bakery import baker

-from autotasks.models import TaskResult
-from tacticalrmm.constants import CustomFieldModel, CustomFieldType, TaskStatus
from tacticalrmm.test import TacticalTestCase


@@ -59,7 +62,7 @@ class TestAPIv3(TacticalTestCase):
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.data["check_interval"], 20)
-        self.assertEqual(len(r.data["checks"]), 2)
+        self.assertEquals(len(r.data["checks"]), 2)

        url = "/api/v3/Maj34ACb324j234asdj2n34kASDjh34-DESKTOPTEST123/checkrunner/"
        r = self.client.get(url)

@@ -67,6 +70,24 @@ class TestAPIv3(TacticalTestCase):

        self.check_not_authenticated("get", url)

+    def test_sysinfo(self):
+        # TODO replace this with golang wmi sample data
+
+        url = "/api/v3/sysinfo/"
+        with open(
+            os.path.join(
+                settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json"
+            )
+        ) as f:
+            wmi_py = json.load(f)
+
+        payload = {"agent_id": self.agent.agent_id, "sysinfo": wmi_py}
+
+        r = self.client.patch(url, payload, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        self.check_not_authenticated("patch", url)
+
    def test_checkrunner_interval(self):
        url = f"/api/v3/{self.agent.agent_id}/checkinterval/"
        r = self.client.get(url, format="json")

@@ -77,7 +98,9 @@ class TestAPIv3(TacticalTestCase):
        )

        # add check to agent with check interval set
-        baker.make_recipe("checks.ping_check", agent=self.agent, run_interval=30)
+        check = baker.make_recipe(
+            "checks.ping_check", agent=self.agent, run_interval=30
+        )

        r = self.client.get(url, format="json")
        self.assertEqual(r.status_code, 200)

@@ -87,7 +110,7 @@ class TestAPIv3(TacticalTestCase):
        )

        # minimum check run interval is 15 seconds
-        baker.make_recipe("checks.ping_check", agent=self.agent, run_interval=5)
+        check = baker.make_recipe("checks.ping_check", agent=self.agent, run_interval=5)

        r = self.client.get(url, format="json")
        self.assertEqual(r.status_code, 200)

@@ -114,6 +137,8 @@ class TestAPIv3(TacticalTestCase):
        self.assertEqual(len(r.json()["checks"]), 15)

    def test_task_runner_get(self):
+        from autotasks.serializers import TaskGOGetSerializer
+
        r = self.client.get("/api/v3/500/asdf9df9dfdf/taskrunner/")
        self.assertEqual(r.status_code, 404)

@@ -127,15 +152,8 @@ class TestAPIv3(TacticalTestCase):
                "script": script.id,
                "script_args": ["test"],
                "timeout": 30,
-                "env_vars": ["hello=world", "foo=bar"],
-            },
-            {
-                "type": "script",
-                "script": 3,
-                "script_args": [],
-                "timeout": 30,
-                "env_vars": ["hello=world", "foo=bar"],
            },
+            {"type": "script", "script": 3, "script_args": [], "timeout": 30},
        ]

        agent = baker.make_recipe("agents.agent")

@@ -145,6 +163,7 @@ class TestAPIv3(TacticalTestCase):

        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
+        self.assertEqual(TaskGOGetSerializer(task).data, r.data)

    def test_task_runner_results(self):
        from agents.models import AgentCustomField

@@ -169,9 +188,7 @@ class TestAPIv3(TacticalTestCase):

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
-        self.assertTrue(
-            TaskResult.objects.get(pk=task_result.pk).status == TaskStatus.PASSING
-        )
+        self.assertTrue(TaskResult.objects.get(pk=task_result.pk).status == "passing")

        # test failing task
        data = {

@@ -183,28 +200,15 @@ class TestAPIv3(TacticalTestCase):

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
-        self.assertTrue(
-            TaskResult.objects.get(pk=task_result.pk).status == TaskStatus.FAILING
-        )
+        self.assertTrue(TaskResult.objects.get(pk=task_result.pk).status == "failing")

        # test collector task
-        text = baker.make(
-            "core.CustomField",
-            model=CustomFieldModel.AGENT,
-            type=CustomFieldType.TEXT,
-            name="Test",
-        )
+        text = baker.make("core.CustomField", model="agent", type="text", name="Test")
        boolean = baker.make(
-            "core.CustomField",
-            model=CustomFieldModel.AGENT,
-            type=CustomFieldType.CHECKBOX,
-            name="Test1",
+            "core.CustomField", model="agent", type="checkbox", name="Test1"
        )
        multiple = baker.make(
-            "core.CustomField",
-            model=CustomFieldModel.AGENT,
-            type=CustomFieldType.MULTIPLE,
-            name="Test2",
+            "core.CustomField", model="agent", type="multiple", name="Test2"
        )

        # test text fields

@@ -221,9 +225,7 @@ class TestAPIv3(TacticalTestCase):

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
-        self.assertTrue(
-            TaskResult.objects.get(pk=task_result.pk).status == TaskStatus.FAILING
-        )
+        self.assertTrue(TaskResult.objects.get(pk=task_result.pk).status == "failing")

        # test saving to text field
        data = {

@@ -235,9 +237,7 @@ class TestAPIv3(TacticalTestCase):

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
-        self.assertEqual(
-            TaskResult.objects.get(pk=task_result.pk).status, TaskStatus.PASSING
-        )
+        self.assertEqual(TaskResult.objects.get(pk=task_result.pk).status, "passing")
        self.assertEqual(
            AgentCustomField.objects.get(field=text, agent=task.agent).value,
            "the last line",

@@ -256,9 +256,7 @@ class TestAPIv3(TacticalTestCase):

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
-        self.assertEqual(
-            TaskResult.objects.get(pk=task_result.pk).status, TaskStatus.PASSING
-        )
+        self.assertEqual(TaskResult.objects.get(pk=task_result.pk).status, "passing")
        self.assertTrue(
            AgentCustomField.objects.get(field=boolean, agent=task.agent).value
        )

@@ -276,9 +274,7 @@ class TestAPIv3(TacticalTestCase):

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
-        self.assertEqual(
-            TaskResult.objects.get(pk=task_result.pk).status, TaskStatus.PASSING
-        )
+        self.assertEqual(TaskResult.objects.get(pk=task_result.pk).status, "passing")
        self.assertEqual(
            AgentCustomField.objects.get(field=multiple, agent=task.agent).value,
            ["this", "is", "an", "array"],

@@ -294,16 +290,8 @@ class TestAPIv3(TacticalTestCase):

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
-        self.assertEqual(
-            TaskResult.objects.get(pk=task_result.pk).status, TaskStatus.PASSING
-        )
+        self.assertEqual(TaskResult.objects.get(pk=task_result.pk).status, "passing")
        self.assertEqual(
            AgentCustomField.objects.get(field=multiple, agent=task.agent).value,
            ["this"],
        )

-    def test_get_agent_config(self):
-        agent = baker.make_recipe("agents.online_agent")
-        url = f"/api/v3/{agent.agent_id}/config/"
-        r = self.client.get(url)
-        self.assertEqual(r.status_code, 200)
Some files were not shown because too many files have changed in this diff.