Compare commits
408 Commits
@@ -1,11 +1,11 @@
 # pulls community scripts from git repo
-FROM python:3.11.6-slim AS GET_SCRIPTS_STAGE
+FROM python:3.11.8-slim AS GET_SCRIPTS_STAGE
 
 RUN apt-get update && \
     apt-get install -y --no-install-recommends git && \
     git clone https://github.com/amidaware/community-scripts.git /community-scripts
 
-FROM python:3.11.6-slim
+FROM python:3.11.8-slim
 
 ENV TACTICAL_DIR /opt/tactical
 ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
 
@@ -33,12 +33,12 @@ function check_tactical_ready {
 }
 
 function django_setup {
-  until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
+  until (echo >/dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &>/dev/null; do
     echo "waiting for postgresql container to be ready..."
     sleep 5
   done
 
-  until (echo > /dev/tcp/"${MESH_SERVICE}"/4443) &> /dev/null; do
+  until (echo >/dev/tcp/"${MESH_SERVICE}"/4443) &>/dev/null; do
     echo "waiting for meshcentral container to be ready..."
     sleep 5
   done
@@ -49,8 +49,11 @@ function django_setup {
   MESH_TOKEN="$(cat ${TACTICAL_DIR}/tmp/mesh_token)"
 
   DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
 
-  localvars="$(cat << EOF
+  BASE_DOMAIN=$(echo "import tldextract; no_fetch_extract = tldextract.TLDExtract(suffix_list_urls=()); extracted = no_fetch_extract('${API_HOST}'); print(f'{extracted.domain}.{extracted.suffix}')" | python)
+
+  localvars="$(
+    cat <<EOF
 SECRET_KEY = '${DJANGO_SEKRET}'
 
 DEBUG = True
@@ -64,11 +67,17 @@ KEY_FILE = '${CERT_PRIV_PATH}'
 
 SCRIPTS_DIR = '/community-scripts'
 
-ALLOWED_HOSTS = ['${API_HOST}', '*']
+ALLOWED_HOSTS = ['${API_HOST}', '${APP_HOST}', '*']
 
 ADMIN_URL = 'admin/'
 
-CORS_ORIGIN_ALLOW_ALL = True
+CORS_ORIGIN_WHITELIST = ['https://${APP_HOST}']
+
+SESSION_COOKIE_DOMAIN = '${BASE_DOMAIN}'
+CSRF_COOKIE_DOMAIN = '${BASE_DOMAIN}'
+CSRF_TRUSTED_ORIGINS = ['https://${API_HOST}', 'https://${APP_HOST}']
+
+HEADLESS_FRONTEND_URLS = {'socialaccount_login_error': 'https://${APP_HOST}/account/provider/callback'}
 
 DATABASES = {
     'default': {
@@ -98,10 +107,11 @@ MESH_TOKEN_KEY = '${MESH_TOKEN}'
 REDIS_HOST = '${REDIS_HOST}'
 MESH_WS_URL = '${MESH_WS_URL}'
 ADMIN_ENABLED = True
+TRMM_INSECURE = True
 EOF
-)"
+  )"
 
-  echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
+  echo "${localvars}" >${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
 
   # run migrations and init scripts
   "${VIRTUAL_ENV}"/bin/python manage.py pre_update_tasks
@@ -116,9 +126,8 @@ EOF
   "${VIRTUAL_ENV}"/bin/python manage.py create_natsapi_conf
   "${VIRTUAL_ENV}"/bin/python manage.py create_installer_user
   "${VIRTUAL_ENV}"/bin/python manage.py post_update_tasks
 
-
   # create super user
   echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
 }
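The BASE_DOMAIN line added in the hunk above shells out to Python so the entrypoint can derive the registrable domain used by the new session/CSRF cookie settings. A minimal sketch of the same call, with an illustrative hostname standing in for `${API_HOST}`:

```python
# Sketch only: what the added BASE_DOMAIN line computes. tldextract splits a
# hostname into subdomain/domain/suffix; suffix_list_urls=() keeps it offline
# (no public-suffix-list download). The hostname below is a placeholder.
import tldextract

no_fetch_extract = tldextract.TLDExtract(suffix_list_urls=())
extracted = no_fetch_extract("api.example.co.uk")  # stand-in for ${API_HOST}

base_domain = f"{extracted.domain}.{extracted.suffix}"
print(base_domain)  # -> "example.co.uk", used as the session/CSRF cookie domain
```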
.github/ISSUE_TEMPLATE/bug_report.md (3 changes)
@@ -14,11 +14,12 @@ assignees: ''
 
 **Installation Method:**
 - [ ] Standard
+- [ ] Standard with `--insecure` flag at install
 - [ ] Docker
 
 **Agent Info (please complete the following information):**
 - Agent version (as shown in the 'Summary' tab of the agent from web UI):
-- Agent OS: [e.g. Win 10 v2004, Server 2012 R2]
+- Agent OS: [e.g. Win 10 v2004, Server 2016]
 
 **Describe the bug**
 A clear and concise description of what the bug is.
.github/workflows/ci-tests.yml (4 changes)
@@ -14,10 +14,10 @@ jobs:
     name: Tests
     strategy:
       matrix:
-        python-version: ["3.11.6"]
+        python-version: ["3.11.8"]
 
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
 
       - uses: harmon758/postgresql-action@v1
        with:
.github/workflows/codeql-analysis.yml (70 changes)
@@ -1,70 +0,0 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ develop ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ develop ]
|
||||
schedule:
|
||||
- cron: '19 14 * * 6'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'go', 'python' ]
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||
# Learn more about CodeQL language support at https://git.io/codeql-language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v1
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v1
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
|
||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
|
||||
# and modify them (or add more) to build your code if your project
|
||||
# uses a compiled language
|
||||
|
||||
#- run: |
|
||||
# make bootstrap
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v1
|
||||
.github/workflows/docker-build-push.yml (20 changes)
@@ -9,24 +9,24 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Get Github Tag
|
||||
id: prep
|
||||
run: |
|
||||
echo ::set-output name=version::${GITHUB_REF#refs/tags/v}
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
|
||||
- name: Build and Push Tactical Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
@@ -36,7 +36,7 @@ jobs:
|
||||
file: ./docker/containers/tactical/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical:${{ steps.prep.outputs.version }},tacticalrmm/tactical:latest
|
||||
|
||||
|
||||
- name: Build and Push Tactical MeshCentral Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
@@ -46,7 +46,7 @@ jobs:
|
||||
file: ./docker/containers/tactical-meshcentral/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-meshcentral:${{ steps.prep.outputs.version }},tacticalrmm/tactical-meshcentral:latest
|
||||
|
||||
|
||||
- name: Build and Push Tactical NATS Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
@@ -56,7 +56,7 @@ jobs:
|
||||
file: ./docker/containers/tactical-nats/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest
|
||||
|
||||
|
||||
- name: Build and Push Tactical Frontend Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
@@ -66,7 +66,7 @@ jobs:
|
||||
file: ./docker/containers/tactical-frontend/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-frontend:${{ steps.prep.outputs.version }},tacticalrmm/tactical-frontend:latest
|
||||
|
||||
|
||||
- name: Build and Push Tactical Nginx Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
|
||||
.vscode/settings.json (27 changes)
@@ -5,27 +5,10 @@
|
||||
"python.analysis.diagnosticSeverityOverrides": {
|
||||
"reportUnusedImport": "error",
|
||||
"reportDuplicateImport": "error",
|
||||
"reportGeneralTypeIssues": "none"
|
||||
"reportGeneralTypeIssues": "none",
|
||||
"reportOptionalMemberAccess": "none",
|
||||
},
|
||||
"python.analysis.typeCheckingMode": "basic",
|
||||
"python.linting.enabled": true,
|
||||
"python.linting.mypyEnabled": true,
|
||||
"python.linting.mypyArgs": [
|
||||
"--ignore-missing-imports",
|
||||
"--follow-imports=silent",
|
||||
"--show-column-numbers",
|
||||
"--strict"
|
||||
],
|
||||
"python.linting.ignorePatterns": [
|
||||
"**/site-packages/**/*.py",
|
||||
".vscode/*.py",
|
||||
"**env/**"
|
||||
],
|
||||
"python.formatting.provider": "none",
|
||||
//"mypy.targets": [
|
||||
//"api/tacticalrmm"
|
||||
//],
|
||||
//"mypy.runUsingActiveInterpreter": true,
|
||||
"editor.bracketPairColorization.enabled": true,
|
||||
"editor.guides.bracketPairs": true,
|
||||
"editor.formatOnSave": true,
|
||||
@@ -34,7 +17,6 @@
|
||||
"**/docker/**/docker-compose*.yml": "dockercompose"
|
||||
},
|
||||
"files.watcherExclude": {
|
||||
"files.watcherExclude": {
|
||||
"**/.git/objects/**": true,
|
||||
"**/.git/subtree-cache/**": true,
|
||||
"**/node_modules/": true,
|
||||
@@ -53,18 +35,17 @@
|
||||
"**/*.parquet*": true,
|
||||
"**/*.pyc": true,
|
||||
"**/*.zip": true
|
||||
}
|
||||
},
|
||||
"go.useLanguageServer": true,
|
||||
"[go]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": false
|
||||
"source.organizeImports": "never"
|
||||
},
|
||||
"editor.snippetSuggestions": "none"
|
||||
},
|
||||
"[go.mod]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": true
|
||||
"source.organizeImports": "explicit"
|
||||
}
|
||||
},
|
||||
"gopls": {
|
||||
|
||||
@@ -8,6 +8,7 @@ Tactical RMM is a remote monitoring & management tool, built with Django and Vue
 It uses an [agent](https://github.com/amidaware/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
 
 # [LIVE DEMO](https://demo.tacticalrmm.com/)
 
 Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.
 
 ### [Discord Chat](https://discord.gg/upGTkWp)
@@ -19,11 +20,11 @@ Demo database resets every hour. A lot of features are disabled for obvious reas
 - Teamviewer-like remote desktop control
 - Real-time remote shell
 - Remote file browser (download and upload files)
-- Remote command and script execution (batch, powershell and python scripts)
+- Remote command and script execution (batch, powershell, python, nushell and deno scripts)
 - Event log viewer
 - Services management
 - Windows patch management
-- Automated checks with email/SMS alerting (cpu, disk, memory, services, scripts, event logs)
+- Automated checks with email/SMS/Webhook alerting (cpu, disk, memory, services, scripts, event logs)
 - Automated task runner (run scripts on a schedule)
 - Remote software installation via chocolatey
 - Software and hardware inventory
@@ -33,10 +34,12 @@ Demo database resets every hour. A lot of features are disabled for obvious reas
 - Windows 7, 8.1, 10, 11, Server 2008R2, 2012R2, 2016, 2019, 2022
 
 ## Linux agent versions supported
 
 - Any distro with systemd which includes but is not limited to: Debian (10, 11), Ubuntu x86_64 (18.04, 20.04, 22.04), Synology 7, centos, freepbx and more!
 
 ## Mac agent versions supported
-- 64 bit Intel and Apple Silicon (M1, M2)
+
+- 64 bit Intel and Apple Silicon (M-Series)
 
 ## Installation / Backup / Restore / Usage
@@ -1,6 +1,6 @@
 ---
 user: "tactical"
-python_ver: "3.11.6"
+python_ver: "3.11.8"
 go_ver: "1.20.7"
 backend_repo: "https://github.com/amidaware/tacticalrmm.git"
 frontend_repo: "https://github.com/amidaware/tacticalrmm-web.git"
@@ -13,7 +13,7 @@ http {
     server_tokens off;
     tcp_nopush on;
     types_hash_max_size 2048;
-    server_names_hash_bucket_size 64;
+    server_names_hash_bucket_size 256;
     include /etc/nginx/mime.types;
     default_type application/octet-stream;
     ssl_protocols TLSv1.2 TLSv1.3;
@@ -329,7 +329,7 @@
     tags: nginx
     become: yes
     ansible.builtin.apt_key:
-      url: https://nginx.org/packages/keys/nginx_signing.key
+      url: https://nginx.org/keys/nginx_signing.key
       state: present
 
   - name: add nginx repo
@@ -13,7 +13,6 @@ DATABASES = {
         'PORT': '5432',
     }
 }
 REDIS_HOST = "localhost"
-ADMIN_ENABLED = True
 CERT_FILE = "{{ fullchain_dest }}"
 KEY_FILE = "{{ privkey_dest }}"
@@ -1,10 +1,11 @@
 import subprocess
 
 import pyotp
+from django.conf import settings
 from django.core.management.base import BaseCommand
 
 from accounts.models import User
-from tacticalrmm.helpers import get_webdomain
+from tacticalrmm.util_settings import get_webdomain
 
 
 class Command(BaseCommand):
@@ -26,7 +27,7 @@ class Command(BaseCommand):
         user.save(update_fields=["totp_key"])
 
         url = pyotp.totp.TOTP(code).provisioning_uri(
-            username, issuer_name=get_webdomain()
+            username, issuer_name=get_webdomain(settings.CORS_ORIGIN_WHITELIST[0])
         )
         subprocess.run(f'qr "{url}"', shell=True)
         self.stdout.write(
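The management command above now passes the dashboard origin into get_webdomain() when building the TOTP provisioning URI. A small pyotp-only sketch of that URI construction, using placeholder values rather than the project's settings:

```python
# Minimal sketch (not the project's code): building a TOTP provisioning URI with
# pyotp, mirroring the pattern in the management command above. The issuer string
# is a hypothetical stand-in for the value derived from settings.
import pyotp

totp_key = pyotp.random_base32()   # per-user secret stored on the User model
issuer = "rmm.example.com"         # assumed value; the command derives it from settings

uri = pyotp.totp.TOTP(totp_key).provisioning_uri("bob", issuer_name=issuer)
print(uri)  # otpauth://totp/rmm.example.com:bob?secret=...&issuer=rmm.example.com
```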
@@ -0,0 +1,16 @@
+# Generated by Django 4.2.7 on 2023-11-09 19:57
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("accounts", "0035_role_can_manage_reports_role_can_view_reports"),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name="role",
+            name="can_ping_agents",
+        ),
+    ]
@@ -0,0 +1,23 @@
+# Generated by Django 4.2.13 on 2024-06-28 20:21
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("accounts", "0036_remove_role_can_ping_agents"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="role",
+            name="can_run_server_scripts",
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name="role",
+            name="can_use_webterm",
+            field=models.BooleanField(default=False),
+        ),
+    ]
@@ -0,0 +1,23 @@
+# Generated by Django 4.2.16 on 2024-10-06 05:44
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("accounts", "0037_role_can_run_server_scripts_role_can_use_webterm"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="role",
+            name="can_edit_global_keystore",
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name="role",
+            name="can_view_global_keystore",
+            field=models.BooleanField(default=False),
+        ),
+    ]
@@ -1,5 +1,6 @@
 from typing import Optional
 
+from allauth.socialaccount.models import SocialAccount
 from django.contrib.auth.models import AbstractUser
 from django.core.cache import cache
 from django.db import models
@@ -64,6 +65,19 @@ class User(AbstractUser, BaseAuditModel):
         on_delete=models.SET_NULL,
     )
 
+    @property
+    def mesh_user_id(self):
+        return f"user//{self.mesh_username}"
+
+    @property
+    def mesh_username(self):
+        # lower() needed for mesh api
+        return f"{self.username.replace(' ', '').lower()}___{self.pk}"
+
+    @property
+    def is_sso_user(self):
+        return SocialAccount.objects.filter(user_id=self.pk).exists()
+
     @staticmethod
     def serialize(user):
         # serializes the task and returns json
@@ -95,7 +109,6 @@ class Role(BaseAuditModel):
 
     # agents
     can_list_agents = models.BooleanField(default=False)
-    can_ping_agents = models.BooleanField(default=False)
     can_use_mesh = models.BooleanField(default=False)
     can_uninstall_agents = models.BooleanField(default=False)
     can_update_agents = models.BooleanField(default=False)
@@ -121,6 +134,10 @@
     can_run_urlactions = models.BooleanField(default=False)
     can_view_customfields = models.BooleanField(default=False)
     can_manage_customfields = models.BooleanField(default=False)
+    can_run_server_scripts = models.BooleanField(default=False)
+    can_use_webterm = models.BooleanField(default=False)
+    can_view_global_keystore = models.BooleanField(default=False)
+    can_edit_global_keystore = models.BooleanField(default=False)
 
     # checks
     can_list_checks = models.BooleanField(default=False)
@@ -196,7 +213,7 @@
     def save(self, *args, **kwargs) -> None:
         # delete cache on save
         cache.delete(f"{ROLE_CACHE_PREFIX}{self.name}")
-        super(BaseAuditModel, self).save(*args, **kwargs)
+        super().save(*args, **kwargs)
 
     @staticmethod
     def serialize(role):
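For reference, a tiny illustration (not repository code) of the strings the new mesh_username and mesh_user_id properties produce, using a hypothetical user:

```python
# Illustrative only: what User.mesh_username / User.mesh_user_id evaluate to for
# a hypothetical user "Jane Doe" with primary key 7, following the f-strings above.
username = "Jane Doe"
pk = 7

mesh_username = f"{username.replace(' ', '').lower()}___{pk}"  # "janedoe___7"
mesh_user_id = f"user//{mesh_username}"                        # "user//janedoe___7"
print(mesh_username, mesh_user_id)
```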
@@ -1,6 +1,7 @@
 from rest_framework import permissions
 
 from tacticalrmm.permissions import _has_perm
+from tacticalrmm.utils import get_core_settings
 
 
 class AccountsPerms(permissions.BasePermission):
@@ -40,3 +41,14 @@ class APIKeyPerms(permissions.BasePermission):
             return _has_perm(r, "can_list_api_keys")
 
         return _has_perm(r, "can_manage_api_keys")
+
+
+class LocalUserPerms(permissions.BasePermission):
+    def has_permission(self, r, view) -> bool:
+        settings = get_core_settings()
+        return not settings.block_local_user_logon
+
+
+class SelfResetSSOPerms(permissions.BasePermission):
+    def has_permission(self, r, view) -> bool:
+        return not r.user.is_sso_user
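The new LocalUserPerms and SelfResetSSOPerms classes follow DRF's BasePermission pattern. A hedged sketch of how a similar class could gate one of the new role flags via the same _has_perm helper used elsewhere in this file; the class name here is hypothetical, not part of the diff:

```python
# A minimal sketch (not from the repo) of gating a view on the new role flag,
# following the _has_perm(request, "<flag>") pattern the existing permission
# classes in this diff use. "WebTermPerms" is a hypothetical name.
from rest_framework import permissions

from tacticalrmm.permissions import _has_perm  # import path as used in this file


class WebTermPerms(permissions.BasePermission):
    def has_permission(self, r, view) -> bool:
        # allow only users whose role grants the new can_use_webterm flag
        return _has_perm(r, "can_use_webterm")
```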
@@ -1,11 +1,12 @@
 import pyotp
+from django.conf import settings
 from rest_framework.serializers import (
     ModelSerializer,
     ReadOnlyField,
     SerializerMethodField,
 )
 
-from tacticalrmm.helpers import get_webdomain
+from tacticalrmm.util_settings import get_webdomain
 
 from .models import APIKey, Role, User
@@ -63,7 +64,7 @@ class TOTPSetupSerializer(ModelSerializer):
 
     def get_qr_url(self, obj):
         return pyotp.totp.TOTP(obj.totp_key).provisioning_uri(
-            obj.username, issuer_name=get_webdomain()
+            obj.username, issuer_name=get_webdomain(settings.CORS_ORIGIN_WHITELIST[0])
         )
@@ -11,19 +11,20 @@ from tacticalrmm.test import TacticalTestCase
 
 class TestAccounts(TacticalTestCase):
     def setUp(self):
         self.setup_coresettings()
+        self.setup_client()
         self.bob = User(username="bob")
         self.bob.set_password("hunter2")
         self.bob.save()
 
     def test_check_creds(self):
-        url = "/checkcreds/"
+        url = "/v2/checkcreds/"
 
         data = {"username": "bob", "password": "hunter2"}
         r = self.client.post(url, data, format="json")
         self.assertEqual(r.status_code, 200)
         self.assertIn("totp", r.data.keys())
-        self.assertEqual(r.data["totp"], "totp not set")
+        self.assertEqual(r.data["totp"], False)
 
         data = {"username": "bob", "password": "a3asdsa2314"}
         r = self.client.post(url, data, format="json")
@@ -40,7 +41,7 @@ class TestAccounts(TacticalTestCase):
         data = {"username": "bob", "password": "hunter2"}
         r = self.client.post(url, data, format="json")
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data, "ok")
+        self.assertEqual(r.data["totp"], True)
 
         # test user set to block dashboard logins
         self.bob.block_dashboard_login = True
@@ -50,7 +51,7 @@
 
     @patch("pyotp.TOTP.verify")
     def test_login_view(self, mock_verify):
-        url = "/login/"
+        url = "/v2/login/"
 
         mock_verify.return_value = True
         data = {"username": "bob", "password": "hunter2", "twofactor": "123456"}
@@ -404,7 +405,7 @@ class TestTOTPSetup(TacticalTestCase):
 
         r = self.client.post(url)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data, "totp token already set")
+        self.assertEqual(r.data, False)
 
 
 class TestAPIAuthentication(TacticalTestCase):
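The updated tests pin down the new /v2/checkcreds/ contract: the response carries a boolean "totp" field instead of the old string sentinels. A rough client-side sketch of that flow, with a placeholder host and no real token handling:

```python
# Sketch of the flow implied by the tests above: /v2/checkcreds/ now returns
# {"totp": true|false}. Host, credentials, and follow-up handling are placeholders,
# not the project's frontend code.
import requests

API = "https://api.example.com"  # hypothetical API host

r = requests.post(f"{API}/v2/checkcreds/", json={"username": "bob", "password": "hunter2"})
r.raise_for_status()

if r.json()["totp"]:
    # TOTP already configured: prompt for the 6-digit code, then call /v2/login/
    ...
else:
    # TOTP not yet set up: proceed to the TOTP setup step
    ...
```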
@@ -5,6 +5,10 @@ from . import views
 urlpatterns = [
     path("users/", views.GetAddUsers.as_view()),
     path("<int:pk>/users/", views.GetUpdateDeleteUser.as_view()),
+    path("sessions/<str:pk>/", views.DeleteActiveLoginSession.as_view()),
+    path(
+        "users/<int:pk>/sessions/", views.GetDeleteActiveLoginSessionsPerUser.as_view()
+    ),
     path("users/reset/", views.UserActions.as_view()),
     path("users/reset_totp/", views.UserActions.as_view()),
     path("users/setup_totp/", views.TOTPSetup.as_view()),
@@ -1,8 +1,10 @@
 from typing import TYPE_CHECKING
 
 from django.conf import settings
 
 if TYPE_CHECKING:
     from django.http import HttpRequest
 
     from accounts.models import User
@@ -16,3 +18,7 @@ def is_root_user(*, request: "HttpRequest", user: "User") -> bool:
         getattr(settings, "DEMO", False) and request.user.username == settings.ROOT_USER
     )
     return root or demo
+
+
+def is_superuser(user: "User") -> bool:
+    return user.role and getattr(user.role, "is_superuser")
@@ -1,20 +1,39 @@
|
||||
import datetime
|
||||
|
||||
import pyotp
|
||||
from allauth.socialaccount.models import SocialAccount, SocialApp
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import login
|
||||
from django.db import IntegrityError
|
||||
from django.shortcuts import get_object_or_404
|
||||
from ipware import get_client_ip
|
||||
from django.utils import timezone as djangotime
|
||||
from knox.models import AuthToken
|
||||
from knox.views import LoginView as KnoxLoginView
|
||||
from python_ipware import IpWare
|
||||
from rest_framework.authtoken.serializers import AuthTokenSerializer
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
ReadOnlyField,
|
||||
SerializerMethodField,
|
||||
)
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.utils import is_root_user
|
||||
from core.tasks import sync_mesh_perms_task
|
||||
from logs.models import AuditLog
|
||||
from tacticalrmm.helpers import notify_error
|
||||
from tacticalrmm.utils import get_core_settings
|
||||
|
||||
from .models import APIKey, Role, User
|
||||
from .permissions import AccountsPerms, APIKeyPerms, RolesPerms
|
||||
from .permissions import (
|
||||
AccountsPerms,
|
||||
APIKeyPerms,
|
||||
LocalUserPerms,
|
||||
RolesPerms,
|
||||
SelfResetSSOPerms,
|
||||
)
|
||||
from .serializers import (
|
||||
APIKeySerializer,
|
||||
RoleSerializer,
|
||||
@@ -22,12 +41,15 @@ from .serializers import (
|
||||
UserSerializer,
|
||||
UserUISerializer,
|
||||
)
|
||||
from accounts.utils import is_root_user
|
||||
|
||||
|
||||
class CheckCreds(KnoxLoginView):
|
||||
class CheckCredsV2(KnoxLoginView):
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
# restrict time on tokens issued by this view to 3 min
|
||||
def get_token_ttl(self):
|
||||
return datetime.timedelta(seconds=180)
|
||||
|
||||
def post(self, request, format=None):
|
||||
# check credentials
|
||||
serializer = AuthTokenSerializer(data=request.data)
|
||||
@@ -39,20 +61,25 @@ class CheckCreds(KnoxLoginView):
|
||||
|
||||
user = serializer.validated_data["user"]
|
||||
|
||||
if user.block_dashboard_login:
|
||||
if user.block_dashboard_login or user.is_sso_user:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
# block local logon if configured
|
||||
core_settings = get_core_settings()
|
||||
if not user.is_superuser and core_settings.block_local_user_logon:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
# if totp token not set modify response to notify frontend
|
||||
if not user.totp_key:
|
||||
login(request, user)
|
||||
response = super(CheckCreds, self).post(request, format=None)
|
||||
response.data["totp"] = "totp not set"
|
||||
response = super().post(request, format=None)
|
||||
response.data["totp"] = False
|
||||
return response
|
||||
|
||||
return Response("ok")
|
||||
return Response({"totp": True})
|
||||
|
||||
|
||||
class LoginView(KnoxLoginView):
|
||||
class LoginViewV2(KnoxLoginView):
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
def post(self, request, format=None):
|
||||
@@ -65,6 +92,14 @@ class LoginView(KnoxLoginView):
|
||||
if user.block_dashboard_login:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
# block local logon if configured
|
||||
core_settings = get_core_settings()
|
||||
if not user.is_superuser and core_settings.block_local_user_logon:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
if user.is_sso_user:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
token = request.data["twofactor"]
|
||||
totp = pyotp.TOTP(user.totp_key)
|
||||
|
||||
@@ -79,14 +114,20 @@ class LoginView(KnoxLoginView):
|
||||
login(request, user)
|
||||
|
||||
# save ip information
|
||||
client_ip, _ = get_client_ip(request)
|
||||
user.last_login_ip = client_ip
|
||||
user.save()
|
||||
ipw = IpWare()
|
||||
client_ip, _ = ipw.get_client_ip(request.META)
|
||||
if client_ip:
|
||||
user.last_login_ip = str(client_ip)
|
||||
user.save()
|
||||
|
||||
AuditLog.audit_user_login_successful(
|
||||
request.data["username"], debug_info={"ip": request._client_ip}
|
||||
)
|
||||
return super(LoginView, self).post(request, format=None)
|
||||
response = super().post(request, format=None)
|
||||
response.data["username"] = request.user.username
|
||||
response.data["name"] = None
|
||||
|
||||
return Response(response.data)
|
||||
else:
|
||||
AuditLog.audit_user_failed_twofactor(
|
||||
request.data["username"], debug_info={"ip": request._client_ip}
|
||||
@@ -94,9 +135,100 @@ class LoginView(KnoxLoginView):
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
|
||||
class GetDeleteActiveLoginSessionsPerUser(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
|
||||
class TokenSerializer(ModelSerializer):
|
||||
user = ReadOnlyField(source="user.username")
|
||||
|
||||
class Meta:
|
||||
model = AuthToken
|
||||
fields = (
|
||||
"digest",
|
||||
"user",
|
||||
"created",
|
||||
"expiry",
|
||||
)
|
||||
|
||||
def get(self, request, pk):
|
||||
tokens = get_object_or_404(User, pk=pk).auth_token_set.filter(
|
||||
expiry__gt=djangotime.now()
|
||||
)
|
||||
|
||||
return Response(self.TokenSerializer(tokens, many=True).data)
|
||||
|
||||
def delete(self, request, pk):
|
||||
tokens = get_object_or_404(User, pk=pk).auth_token_set.filter(
|
||||
expiry__gt=djangotime.now()
|
||||
)
|
||||
|
||||
tokens.delete()
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class DeleteActiveLoginSession(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
|
||||
def delete(self, request, pk):
|
||||
token = get_object_or_404(AuthToken, digest=pk)
|
||||
|
||||
token.delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class GetAddUsers(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
|
||||
class UserSerializerSSO(ModelSerializer):
|
||||
social_accounts = SerializerMethodField()
|
||||
|
||||
def get_social_accounts(self, obj):
|
||||
accounts = SocialAccount.objects.filter(user_id=obj.pk)
|
||||
|
||||
if accounts:
|
||||
social_accounts = []
|
||||
for account in accounts:
|
||||
try:
|
||||
provider_account = account.get_provider_account()
|
||||
display = provider_account.to_str()
|
||||
except SocialApp.DoesNotExist:
|
||||
display = "Orphaned Provider"
|
||||
except Exception:
|
||||
display = "Unknown"
|
||||
|
||||
social_accounts.append(
|
||||
{
|
||||
"uid": account.uid,
|
||||
"provider": account.provider,
|
||||
"display": display,
|
||||
"last_login": account.last_login,
|
||||
"date_joined": account.date_joined,
|
||||
"extra_data": account.extra_data,
|
||||
}
|
||||
)
|
||||
|
||||
return social_accounts
|
||||
|
||||
return []
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
fields = [
|
||||
"id",
|
||||
"username",
|
||||
"first_name",
|
||||
"last_name",
|
||||
"email",
|
||||
"is_active",
|
||||
"last_login",
|
||||
"last_login_ip",
|
||||
"role",
|
||||
"block_dashboard_login",
|
||||
"date_format",
|
||||
"social_accounts",
|
||||
]
|
||||
|
||||
def get(self, request):
|
||||
search = request.GET.get("search", None)
|
||||
|
||||
@@ -107,7 +239,7 @@ class GetAddUsers(APIView):
|
||||
else:
|
||||
users = User.objects.filter(agent=None, is_installer_user=False)
|
||||
|
||||
return Response(UserSerializer(users, many=True).data)
|
||||
return Response(self.UserSerializerSSO(users, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
# add new user
|
||||
@@ -131,6 +263,7 @@ class GetAddUsers(APIView):
|
||||
user.role = role
|
||||
|
||||
user.save()
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response(user.username)
|
||||
|
||||
|
||||
@@ -151,6 +284,7 @@ class GetUpdateDeleteUser(APIView):
|
||||
serializer = UserSerializer(instance=user, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
sync_mesh_perms_task.delay()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -160,12 +294,12 @@ class GetUpdateDeleteUser(APIView):
|
||||
return notify_error("The root user cannot be deleted from the UI")
|
||||
|
||||
user.delete()
|
||||
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class UserActions(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
permission_classes = [IsAuthenticated, AccountsPerms, LocalUserPerms]
|
||||
|
||||
# reset password
|
||||
def post(self, request):
|
||||
@@ -202,7 +336,7 @@ class TOTPSetup(APIView):
|
||||
user.save(update_fields=["totp_key"])
|
||||
return Response(TOTPSetupSerializer(user).data)
|
||||
|
||||
return Response("totp token already set")
|
||||
return Response(False)
|
||||
|
||||
|
||||
class UserUI(APIView):
|
||||
@@ -241,11 +375,13 @@ class GetUpdateDeleteRole(APIView):
|
||||
serializer = RoleSerializer(instance=role, data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response("Role was edited")
|
||||
|
||||
def delete(self, request, pk):
|
||||
role = get_object_or_404(Role, pk=pk)
|
||||
role.delete()
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response("Role was removed")
|
||||
|
||||
|
||||
@@ -289,7 +425,7 @@ class GetUpdateDeleteAPIKey(APIView):
|
||||
|
||||
|
||||
class ResetPass(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
permission_classes = [IsAuthenticated, SelfResetSSOPerms]
|
||||
|
||||
def put(self, request):
|
||||
user = request.user
|
||||
@@ -299,7 +435,7 @@ class ResetPass(APIView):
|
||||
|
||||
|
||||
class Reset2FA(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
permission_classes = [IsAuthenticated, SelfResetSSOPerms]
|
||||
|
||||
def put(self, request):
|
||||
user = request.user
|
||||
|
||||
api/tacticalrmm/agents/migrations/0058_alter_agent_time_zone.py (633 additions, new file)
@@ -0,0 +1,633 @@
|
||||
# Generated by Django 4.2.7 on 2023-11-09 19:56
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("agents", "0057_alter_agentcustomfield_unique_together"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="agent",
|
||||
name="time_zone",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
choices=[
|
||||
("Africa/Abidjan", "Africa/Abidjan"),
|
||||
("Africa/Accra", "Africa/Accra"),
|
||||
("Africa/Addis_Ababa", "Africa/Addis_Ababa"),
|
||||
("Africa/Algiers", "Africa/Algiers"),
|
||||
("Africa/Asmara", "Africa/Asmara"),
|
||||
("Africa/Asmera", "Africa/Asmera"),
|
||||
("Africa/Bamako", "Africa/Bamako"),
|
||||
("Africa/Bangui", "Africa/Bangui"),
|
||||
("Africa/Banjul", "Africa/Banjul"),
|
||||
("Africa/Bissau", "Africa/Bissau"),
|
||||
("Africa/Blantyre", "Africa/Blantyre"),
|
||||
("Africa/Brazzaville", "Africa/Brazzaville"),
|
||||
("Africa/Bujumbura", "Africa/Bujumbura"),
|
||||
("Africa/Cairo", "Africa/Cairo"),
|
||||
("Africa/Casablanca", "Africa/Casablanca"),
|
||||
("Africa/Ceuta", "Africa/Ceuta"),
|
||||
("Africa/Conakry", "Africa/Conakry"),
|
||||
("Africa/Dakar", "Africa/Dakar"),
|
||||
("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"),
|
||||
("Africa/Djibouti", "Africa/Djibouti"),
|
||||
("Africa/Douala", "Africa/Douala"),
|
||||
("Africa/El_Aaiun", "Africa/El_Aaiun"),
|
||||
("Africa/Freetown", "Africa/Freetown"),
|
||||
("Africa/Gaborone", "Africa/Gaborone"),
|
||||
("Africa/Harare", "Africa/Harare"),
|
||||
("Africa/Johannesburg", "Africa/Johannesburg"),
|
||||
("Africa/Juba", "Africa/Juba"),
|
||||
("Africa/Kampala", "Africa/Kampala"),
|
||||
("Africa/Khartoum", "Africa/Khartoum"),
|
||||
("Africa/Kigali", "Africa/Kigali"),
|
||||
("Africa/Kinshasa", "Africa/Kinshasa"),
|
||||
("Africa/Lagos", "Africa/Lagos"),
|
||||
("Africa/Libreville", "Africa/Libreville"),
|
||||
("Africa/Lome", "Africa/Lome"),
|
||||
("Africa/Luanda", "Africa/Luanda"),
|
||||
("Africa/Lubumbashi", "Africa/Lubumbashi"),
|
||||
("Africa/Lusaka", "Africa/Lusaka"),
|
||||
("Africa/Malabo", "Africa/Malabo"),
|
||||
("Africa/Maputo", "Africa/Maputo"),
|
||||
("Africa/Maseru", "Africa/Maseru"),
|
||||
("Africa/Mbabane", "Africa/Mbabane"),
|
||||
("Africa/Mogadishu", "Africa/Mogadishu"),
|
||||
("Africa/Monrovia", "Africa/Monrovia"),
|
||||
("Africa/Nairobi", "Africa/Nairobi"),
|
||||
("Africa/Ndjamena", "Africa/Ndjamena"),
|
||||
("Africa/Niamey", "Africa/Niamey"),
|
||||
("Africa/Nouakchott", "Africa/Nouakchott"),
|
||||
("Africa/Ouagadougou", "Africa/Ouagadougou"),
|
||||
("Africa/Porto-Novo", "Africa/Porto-Novo"),
|
||||
("Africa/Sao_Tome", "Africa/Sao_Tome"),
|
||||
("Africa/Timbuktu", "Africa/Timbuktu"),
|
||||
("Africa/Tripoli", "Africa/Tripoli"),
|
||||
("Africa/Tunis", "Africa/Tunis"),
|
||||
("Africa/Windhoek", "Africa/Windhoek"),
|
||||
("America/Adak", "America/Adak"),
|
||||
("America/Anchorage", "America/Anchorage"),
|
||||
("America/Anguilla", "America/Anguilla"),
|
||||
("America/Antigua", "America/Antigua"),
|
||||
("America/Araguaina", "America/Araguaina"),
|
||||
(
|
||||
"America/Argentina/Buenos_Aires",
|
||||
"America/Argentina/Buenos_Aires",
|
||||
),
|
||||
("America/Argentina/Catamarca", "America/Argentina/Catamarca"),
|
||||
(
|
||||
"America/Argentina/ComodRivadavia",
|
||||
"America/Argentina/ComodRivadavia",
|
||||
),
|
||||
("America/Argentina/Cordoba", "America/Argentina/Cordoba"),
|
||||
("America/Argentina/Jujuy", "America/Argentina/Jujuy"),
|
||||
("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"),
|
||||
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
|
||||
(
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
),
|
||||
("America/Argentina/Salta", "America/Argentina/Salta"),
|
||||
("America/Argentina/San_Juan", "America/Argentina/San_Juan"),
|
||||
("America/Argentina/San_Luis", "America/Argentina/San_Luis"),
|
||||
("America/Argentina/Tucuman", "America/Argentina/Tucuman"),
|
||||
("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"),
|
||||
("America/Aruba", "America/Aruba"),
|
||||
("America/Asuncion", "America/Asuncion"),
|
||||
("America/Atikokan", "America/Atikokan"),
|
||||
("America/Atka", "America/Atka"),
|
||||
("America/Bahia", "America/Bahia"),
|
||||
("America/Bahia_Banderas", "America/Bahia_Banderas"),
|
||||
("America/Barbados", "America/Barbados"),
|
||||
("America/Belem", "America/Belem"),
|
||||
("America/Belize", "America/Belize"),
|
||||
("America/Blanc-Sablon", "America/Blanc-Sablon"),
|
||||
("America/Boa_Vista", "America/Boa_Vista"),
|
||||
("America/Bogota", "America/Bogota"),
|
||||
("America/Boise", "America/Boise"),
|
||||
("America/Buenos_Aires", "America/Buenos_Aires"),
|
||||
("America/Cambridge_Bay", "America/Cambridge_Bay"),
|
||||
("America/Campo_Grande", "America/Campo_Grande"),
|
||||
("America/Cancun", "America/Cancun"),
|
||||
("America/Caracas", "America/Caracas"),
|
||||
("America/Catamarca", "America/Catamarca"),
|
||||
("America/Cayenne", "America/Cayenne"),
|
||||
("America/Cayman", "America/Cayman"),
|
||||
("America/Chicago", "America/Chicago"),
|
||||
("America/Chihuahua", "America/Chihuahua"),
|
||||
("America/Ciudad_Juarez", "America/Ciudad_Juarez"),
|
||||
("America/Coral_Harbour", "America/Coral_Harbour"),
|
||||
("America/Cordoba", "America/Cordoba"),
|
||||
("America/Costa_Rica", "America/Costa_Rica"),
|
||||
("America/Creston", "America/Creston"),
|
||||
("America/Cuiaba", "America/Cuiaba"),
|
||||
("America/Curacao", "America/Curacao"),
|
||||
("America/Danmarkshavn", "America/Danmarkshavn"),
|
||||
("America/Dawson", "America/Dawson"),
|
||||
("America/Dawson_Creek", "America/Dawson_Creek"),
|
||||
("America/Denver", "America/Denver"),
|
||||
("America/Detroit", "America/Detroit"),
|
||||
("America/Dominica", "America/Dominica"),
|
||||
("America/Edmonton", "America/Edmonton"),
|
||||
("America/Eirunepe", "America/Eirunepe"),
|
||||
("America/El_Salvador", "America/El_Salvador"),
|
||||
("America/Ensenada", "America/Ensenada"),
|
||||
("America/Fort_Nelson", "America/Fort_Nelson"),
|
||||
("America/Fort_Wayne", "America/Fort_Wayne"),
|
||||
("America/Fortaleza", "America/Fortaleza"),
|
||||
("America/Glace_Bay", "America/Glace_Bay"),
|
||||
("America/Godthab", "America/Godthab"),
|
||||
("America/Goose_Bay", "America/Goose_Bay"),
|
||||
("America/Grand_Turk", "America/Grand_Turk"),
|
||||
("America/Grenada", "America/Grenada"),
|
||||
("America/Guadeloupe", "America/Guadeloupe"),
|
||||
("America/Guatemala", "America/Guatemala"),
|
||||
("America/Guayaquil", "America/Guayaquil"),
|
||||
("America/Guyana", "America/Guyana"),
|
||||
("America/Halifax", "America/Halifax"),
|
||||
("America/Havana", "America/Havana"),
|
||||
("America/Hermosillo", "America/Hermosillo"),
|
||||
("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"),
|
||||
("America/Indiana/Knox", "America/Indiana/Knox"),
|
||||
("America/Indiana/Marengo", "America/Indiana/Marengo"),
|
||||
("America/Indiana/Petersburg", "America/Indiana/Petersburg"),
|
||||
("America/Indiana/Tell_City", "America/Indiana/Tell_City"),
|
||||
("America/Indiana/Vevay", "America/Indiana/Vevay"),
|
||||
("America/Indiana/Vincennes", "America/Indiana/Vincennes"),
|
||||
("America/Indiana/Winamac", "America/Indiana/Winamac"),
|
||||
("America/Indianapolis", "America/Indianapolis"),
|
||||
("America/Inuvik", "America/Inuvik"),
|
||||
("America/Iqaluit", "America/Iqaluit"),
|
||||
("America/Jamaica", "America/Jamaica"),
|
||||
("America/Jujuy", "America/Jujuy"),
|
||||
("America/Juneau", "America/Juneau"),
|
||||
("America/Kentucky/Louisville", "America/Kentucky/Louisville"),
|
||||
("America/Kentucky/Monticello", "America/Kentucky/Monticello"),
|
||||
("America/Knox_IN", "America/Knox_IN"),
|
||||
("America/Kralendijk", "America/Kralendijk"),
|
||||
("America/La_Paz", "America/La_Paz"),
|
||||
("America/Lima", "America/Lima"),
|
||||
("America/Los_Angeles", "America/Los_Angeles"),
|
||||
("America/Louisville", "America/Louisville"),
|
||||
("America/Lower_Princes", "America/Lower_Princes"),
|
||||
("America/Maceio", "America/Maceio"),
|
||||
("America/Managua", "America/Managua"),
|
||||
("America/Manaus", "America/Manaus"),
|
||||
("America/Marigot", "America/Marigot"),
|
||||
("America/Martinique", "America/Martinique"),
|
||||
("America/Matamoros", "America/Matamoros"),
|
||||
("America/Mazatlan", "America/Mazatlan"),
|
||||
("America/Mendoza", "America/Mendoza"),
|
||||
("America/Menominee", "America/Menominee"),
|
||||
("America/Merida", "America/Merida"),
|
||||
("America/Metlakatla", "America/Metlakatla"),
|
||||
("America/Mexico_City", "America/Mexico_City"),
|
||||
("America/Miquelon", "America/Miquelon"),
|
||||
("America/Moncton", "America/Moncton"),
|
||||
("America/Monterrey", "America/Monterrey"),
|
||||
("America/Montevideo", "America/Montevideo"),
|
||||
("America/Montreal", "America/Montreal"),
|
||||
("America/Montserrat", "America/Montserrat"),
|
||||
("America/Nassau", "America/Nassau"),
|
||||
("America/New_York", "America/New_York"),
|
||||
("America/Nipigon", "America/Nipigon"),
|
||||
("America/Nome", "America/Nome"),
|
||||
("America/Noronha", "America/Noronha"),
|
||||
("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"),
|
||||
("America/North_Dakota/Center", "America/North_Dakota/Center"),
|
||||
(
|
||||
"America/North_Dakota/New_Salem",
|
||||
"America/North_Dakota/New_Salem",
|
||||
),
|
||||
("America/Nuuk", "America/Nuuk"),
|
||||
("America/Ojinaga", "America/Ojinaga"),
|
||||
("America/Panama", "America/Panama"),
|
||||
("America/Pangnirtung", "America/Pangnirtung"),
|
||||
("America/Paramaribo", "America/Paramaribo"),
|
||||
("America/Phoenix", "America/Phoenix"),
|
||||
("America/Port-au-Prince", "America/Port-au-Prince"),
|
||||
("America/Port_of_Spain", "America/Port_of_Spain"),
|
||||
("America/Porto_Acre", "America/Porto_Acre"),
|
||||
("America/Porto_Velho", "America/Porto_Velho"),
|
||||
("America/Puerto_Rico", "America/Puerto_Rico"),
|
||||
("America/Punta_Arenas", "America/Punta_Arenas"),
|
||||
("America/Rainy_River", "America/Rainy_River"),
|
||||
("America/Rankin_Inlet", "America/Rankin_Inlet"),
|
||||
("America/Recife", "America/Recife"),
|
||||
("America/Regina", "America/Regina"),
|
||||
("America/Resolute", "America/Resolute"),
|
||||
("America/Rio_Branco", "America/Rio_Branco"),
|
||||
("America/Rosario", "America/Rosario"),
|
||||
("America/Santa_Isabel", "America/Santa_Isabel"),
|
||||
("America/Santarem", "America/Santarem"),
|
||||
("America/Santiago", "America/Santiago"),
|
||||
("America/Santo_Domingo", "America/Santo_Domingo"),
|
||||
("America/Sao_Paulo", "America/Sao_Paulo"),
|
||||
("America/Scoresbysund", "America/Scoresbysund"),
|
||||
("America/Shiprock", "America/Shiprock"),
|
||||
("America/Sitka", "America/Sitka"),
|
||||
("America/St_Barthelemy", "America/St_Barthelemy"),
|
||||
("America/St_Johns", "America/St_Johns"),
|
||||
("America/St_Kitts", "America/St_Kitts"),
|
||||
("America/St_Lucia", "America/St_Lucia"),
|
||||
("America/St_Thomas", "America/St_Thomas"),
|
||||
("America/St_Vincent", "America/St_Vincent"),
|
||||
("America/Swift_Current", "America/Swift_Current"),
|
||||
("America/Tegucigalpa", "America/Tegucigalpa"),
|
||||
("America/Thule", "America/Thule"),
|
||||
("America/Thunder_Bay", "America/Thunder_Bay"),
|
||||
("America/Tijuana", "America/Tijuana"),
|
||||
("America/Toronto", "America/Toronto"),
|
||||
("America/Tortola", "America/Tortola"),
|
||||
("America/Vancouver", "America/Vancouver"),
|
||||
("America/Virgin", "America/Virgin"),
|
||||
("America/Whitehorse", "America/Whitehorse"),
|
||||
("America/Winnipeg", "America/Winnipeg"),
|
||||
("America/Yakutat", "America/Yakutat"),
|
||||
("America/Yellowknife", "America/Yellowknife"),
|
||||
("Antarctica/Casey", "Antarctica/Casey"),
|
||||
("Antarctica/Davis", "Antarctica/Davis"),
|
||||
("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"),
|
||||
("Antarctica/Macquarie", "Antarctica/Macquarie"),
|
||||
("Antarctica/Mawson", "Antarctica/Mawson"),
|
||||
("Antarctica/McMurdo", "Antarctica/McMurdo"),
|
||||
("Antarctica/Palmer", "Antarctica/Palmer"),
|
||||
("Antarctica/Rothera", "Antarctica/Rothera"),
|
||||
("Antarctica/South_Pole", "Antarctica/South_Pole"),
|
||||
("Antarctica/Syowa", "Antarctica/Syowa"),
|
||||
("Antarctica/Troll", "Antarctica/Troll"),
|
||||
("Antarctica/Vostok", "Antarctica/Vostok"),
|
||||
("Arctic/Longyearbyen", "Arctic/Longyearbyen"),
|
||||
("Asia/Aden", "Asia/Aden"),
|
||||
("Asia/Almaty", "Asia/Almaty"),
|
||||
("Asia/Amman", "Asia/Amman"),
|
||||
("Asia/Anadyr", "Asia/Anadyr"),
|
||||
("Asia/Aqtau", "Asia/Aqtau"),
|
||||
("Asia/Aqtobe", "Asia/Aqtobe"),
|
||||
("Asia/Ashgabat", "Asia/Ashgabat"),
|
||||
("Asia/Ashkhabad", "Asia/Ashkhabad"),
|
||||
("Asia/Atyrau", "Asia/Atyrau"),
|
||||
("Asia/Baghdad", "Asia/Baghdad"),
|
||||
("Asia/Bahrain", "Asia/Bahrain"),
|
||||
("Asia/Baku", "Asia/Baku"),
|
||||
("Asia/Bangkok", "Asia/Bangkok"),
|
||||
("Asia/Barnaul", "Asia/Barnaul"),
|
||||
("Asia/Beirut", "Asia/Beirut"),
|
||||
("Asia/Bishkek", "Asia/Bishkek"),
|
||||
("Asia/Brunei", "Asia/Brunei"),
|
||||
("Asia/Calcutta", "Asia/Calcutta"),
|
||||
("Asia/Chita", "Asia/Chita"),
|
||||
("Asia/Choibalsan", "Asia/Choibalsan"),
|
||||
("Asia/Chongqing", "Asia/Chongqing"),
|
||||
("Asia/Chungking", "Asia/Chungking"),
|
||||
("Asia/Colombo", "Asia/Colombo"),
|
||||
("Asia/Dacca", "Asia/Dacca"),
|
||||
("Asia/Damascus", "Asia/Damascus"),
|
||||
("Asia/Dhaka", "Asia/Dhaka"),
|
||||
("Asia/Dili", "Asia/Dili"),
|
||||
("Asia/Dubai", "Asia/Dubai"),
|
||||
("Asia/Dushanbe", "Asia/Dushanbe"),
|
||||
("Asia/Famagusta", "Asia/Famagusta"),
|
||||
("Asia/Gaza", "Asia/Gaza"),
|
||||
("Asia/Harbin", "Asia/Harbin"),
|
||||
("Asia/Hebron", "Asia/Hebron"),
|
||||
("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"),
|
||||
("Asia/Hong_Kong", "Asia/Hong_Kong"),
|
||||
("Asia/Hovd", "Asia/Hovd"),
|
||||
("Asia/Irkutsk", "Asia/Irkutsk"),
|
||||
("Asia/Istanbul", "Asia/Istanbul"),
|
||||
("Asia/Jakarta", "Asia/Jakarta"),
|
||||
("Asia/Jayapura", "Asia/Jayapura"),
|
||||
("Asia/Jerusalem", "Asia/Jerusalem"),
|
||||
("Asia/Kabul", "Asia/Kabul"),
|
||||
("Asia/Kamchatka", "Asia/Kamchatka"),
|
||||
("Asia/Karachi", "Asia/Karachi"),
|
||||
("Asia/Kashgar", "Asia/Kashgar"),
|
||||
("Asia/Kathmandu", "Asia/Kathmandu"),
|
||||
("Asia/Katmandu", "Asia/Katmandu"),
|
||||
("Asia/Khandyga", "Asia/Khandyga"),
|
||||
("Asia/Kolkata", "Asia/Kolkata"),
|
||||
("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"),
|
||||
("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"),
|
||||
("Asia/Kuching", "Asia/Kuching"),
|
||||
("Asia/Kuwait", "Asia/Kuwait"),
|
||||
("Asia/Macao", "Asia/Macao"),
|
||||
("Asia/Macau", "Asia/Macau"),
|
||||
("Asia/Magadan", "Asia/Magadan"),
|
||||
("Asia/Makassar", "Asia/Makassar"),
|
||||
("Asia/Manila", "Asia/Manila"),
|
||||
("Asia/Muscat", "Asia/Muscat"),
|
||||
("Asia/Nicosia", "Asia/Nicosia"),
|
||||
("Asia/Novokuznetsk", "Asia/Novokuznetsk"),
|
||||
("Asia/Novosibirsk", "Asia/Novosibirsk"),
|
||||
("Asia/Omsk", "Asia/Omsk"),
|
||||
("Asia/Oral", "Asia/Oral"),
|
||||
("Asia/Phnom_Penh", "Asia/Phnom_Penh"),
|
||||
("Asia/Pontianak", "Asia/Pontianak"),
|
||||
("Asia/Pyongyang", "Asia/Pyongyang"),
|
||||
("Asia/Qatar", "Asia/Qatar"),
|
||||
("Asia/Qostanay", "Asia/Qostanay"),
|
||||
("Asia/Qyzylorda", "Asia/Qyzylorda"),
|
||||
("Asia/Rangoon", "Asia/Rangoon"),
|
||||
("Asia/Riyadh", "Asia/Riyadh"),
|
||||
("Asia/Saigon", "Asia/Saigon"),
|
||||
("Asia/Sakhalin", "Asia/Sakhalin"),
|
||||
("Asia/Samarkand", "Asia/Samarkand"),
|
||||
("Asia/Seoul", "Asia/Seoul"),
|
||||
("Asia/Shanghai", "Asia/Shanghai"),
|
||||
("Asia/Singapore", "Asia/Singapore"),
|
||||
("Asia/Srednekolymsk", "Asia/Srednekolymsk"),
|
||||
("Asia/Taipei", "Asia/Taipei"),
|
||||
("Asia/Tashkent", "Asia/Tashkent"),
|
||||
("Asia/Tbilisi", "Asia/Tbilisi"),
|
||||
("Asia/Tehran", "Asia/Tehran"),
|
||||
("Asia/Tel_Aviv", "Asia/Tel_Aviv"),
|
||||
("Asia/Thimbu", "Asia/Thimbu"),
|
||||
("Asia/Thimphu", "Asia/Thimphu"),
|
||||
("Asia/Tokyo", "Asia/Tokyo"),
|
||||
("Asia/Tomsk", "Asia/Tomsk"),
|
||||
("Asia/Ujung_Pandang", "Asia/Ujung_Pandang"),
|
||||
("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"),
|
||||
("Asia/Ulan_Bator", "Asia/Ulan_Bator"),
|
||||
("Asia/Urumqi", "Asia/Urumqi"),
|
||||
("Asia/Ust-Nera", "Asia/Ust-Nera"),
|
||||
("Asia/Vientiane", "Asia/Vientiane"),
|
||||
("Asia/Vladivostok", "Asia/Vladivostok"),
|
||||
("Asia/Yakutsk", "Asia/Yakutsk"),
|
||||
("Asia/Yangon", "Asia/Yangon"),
|
||||
("Asia/Yekaterinburg", "Asia/Yekaterinburg"),
|
||||
("Asia/Yerevan", "Asia/Yerevan"),
|
||||
("Atlantic/Azores", "Atlantic/Azores"),
|
||||
("Atlantic/Bermuda", "Atlantic/Bermuda"),
|
||||
("Atlantic/Canary", "Atlantic/Canary"),
|
||||
("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"),
|
||||
("Atlantic/Faeroe", "Atlantic/Faeroe"),
|
||||
("Atlantic/Faroe", "Atlantic/Faroe"),
|
||||
("Atlantic/Jan_Mayen", "Atlantic/Jan_Mayen"),
|
||||
("Atlantic/Madeira", "Atlantic/Madeira"),
|
||||
("Atlantic/Reykjavik", "Atlantic/Reykjavik"),
|
||||
("Atlantic/South_Georgia", "Atlantic/South_Georgia"),
|
||||
("Atlantic/St_Helena", "Atlantic/St_Helena"),
|
||||
("Atlantic/Stanley", "Atlantic/Stanley"),
|
||||
("Australia/ACT", "Australia/ACT"),
|
||||
("Australia/Adelaide", "Australia/Adelaide"),
|
||||
("Australia/Brisbane", "Australia/Brisbane"),
|
||||
("Australia/Broken_Hill", "Australia/Broken_Hill"),
|
||||
("Australia/Canberra", "Australia/Canberra"),
|
||||
("Australia/Currie", "Australia/Currie"),
|
||||
("Australia/Darwin", "Australia/Darwin"),
|
||||
("Australia/Eucla", "Australia/Eucla"),
|
||||
("Australia/Hobart", "Australia/Hobart"),
|
||||
("Australia/LHI", "Australia/LHI"),
|
||||
("Australia/Lindeman", "Australia/Lindeman"),
|
||||
("Australia/Lord_Howe", "Australia/Lord_Howe"),
|
||||
("Australia/Melbourne", "Australia/Melbourne"),
|
||||
("Australia/NSW", "Australia/NSW"),
|
||||
("Australia/North", "Australia/North"),
|
||||
("Australia/Perth", "Australia/Perth"),
|
||||
("Australia/Queensland", "Australia/Queensland"),
|
||||
("Australia/South", "Australia/South"),
|
||||
("Australia/Sydney", "Australia/Sydney"),
|
||||
("Australia/Tasmania", "Australia/Tasmania"),
|
||||
("Australia/Victoria", "Australia/Victoria"),
|
||||
("Australia/West", "Australia/West"),
|
||||
("Australia/Yancowinna", "Australia/Yancowinna"),
|
||||
("Brazil/Acre", "Brazil/Acre"),
|
||||
("Brazil/DeNoronha", "Brazil/DeNoronha"),
|
||||
("Brazil/East", "Brazil/East"),
|
||||
("Brazil/West", "Brazil/West"),
|
||||
("CET", "CET"),
|
||||
("CST6CDT", "CST6CDT"),
|
||||
("Canada/Atlantic", "Canada/Atlantic"),
|
||||
("Canada/Central", "Canada/Central"),
|
||||
("Canada/Eastern", "Canada/Eastern"),
|
||||
("Canada/Mountain", "Canada/Mountain"),
|
||||
("Canada/Newfoundland", "Canada/Newfoundland"),
|
||||
("Canada/Pacific", "Canada/Pacific"),
|
||||
("Canada/Saskatchewan", "Canada/Saskatchewan"),
|
||||
("Canada/Yukon", "Canada/Yukon"),
|
||||
("Chile/Continental", "Chile/Continental"),
|
||||
("Chile/EasterIsland", "Chile/EasterIsland"),
|
||||
("Cuba", "Cuba"),
|
||||
("EET", "EET"),
|
||||
("EST", "EST"),
|
||||
("EST5EDT", "EST5EDT"),
|
||||
("Egypt", "Egypt"),
|
||||
("Eire", "Eire"),
|
||||
("Etc/GMT", "Etc/GMT"),
|
||||
("Etc/GMT+0", "Etc/GMT+0"),
|
||||
("Etc/GMT+1", "Etc/GMT+1"),
|
||||
("Etc/GMT+10", "Etc/GMT+10"),
|
||||
("Etc/GMT+11", "Etc/GMT+11"),
|
||||
("Etc/GMT+12", "Etc/GMT+12"),
|
||||
("Etc/GMT+2", "Etc/GMT+2"),
|
||||
("Etc/GMT+3", "Etc/GMT+3"),
|
||||
("Etc/GMT+4", "Etc/GMT+4"),
|
||||
("Etc/GMT+5", "Etc/GMT+5"),
|
||||
("Etc/GMT+6", "Etc/GMT+6"),
|
||||
("Etc/GMT+7", "Etc/GMT+7"),
|
||||
("Etc/GMT+8", "Etc/GMT+8"),
|
||||
("Etc/GMT+9", "Etc/GMT+9"),
|
||||
("Etc/GMT-0", "Etc/GMT-0"),
|
||||
("Etc/GMT-1", "Etc/GMT-1"),
|
||||
("Etc/GMT-10", "Etc/GMT-10"),
|
||||
("Etc/GMT-11", "Etc/GMT-11"),
|
||||
("Etc/GMT-12", "Etc/GMT-12"),
|
||||
("Etc/GMT-13", "Etc/GMT-13"),
|
||||
("Etc/GMT-14", "Etc/GMT-14"),
|
||||
("Etc/GMT-2", "Etc/GMT-2"),
|
||||
("Etc/GMT-3", "Etc/GMT-3"),
|
||||
("Etc/GMT-4", "Etc/GMT-4"),
|
||||
("Etc/GMT-5", "Etc/GMT-5"),
|
||||
("Etc/GMT-6", "Etc/GMT-6"),
|
||||
("Etc/GMT-7", "Etc/GMT-7"),
|
||||
("Etc/GMT-8", "Etc/GMT-8"),
|
||||
("Etc/GMT-9", "Etc/GMT-9"),
|
||||
("Etc/GMT0", "Etc/GMT0"),
|
||||
("Etc/Greenwich", "Etc/Greenwich"),
|
||||
("Etc/UCT", "Etc/UCT"),
|
||||
("Etc/UTC", "Etc/UTC"),
|
||||
("Etc/Universal", "Etc/Universal"),
|
||||
("Etc/Zulu", "Etc/Zulu"),
|
||||
("Europe/Amsterdam", "Europe/Amsterdam"),
|
||||
("Europe/Andorra", "Europe/Andorra"),
|
||||
("Europe/Astrakhan", "Europe/Astrakhan"),
|
||||
("Europe/Athens", "Europe/Athens"),
|
||||
("Europe/Belfast", "Europe/Belfast"),
|
||||
("Europe/Belgrade", "Europe/Belgrade"),
|
||||
("Europe/Berlin", "Europe/Berlin"),
|
||||
("Europe/Bratislava", "Europe/Bratislava"),
|
||||
("Europe/Brussels", "Europe/Brussels"),
|
||||
("Europe/Bucharest", "Europe/Bucharest"),
|
||||
("Europe/Budapest", "Europe/Budapest"),
|
||||
("Europe/Busingen", "Europe/Busingen"),
|
||||
("Europe/Chisinau", "Europe/Chisinau"),
|
||||
("Europe/Copenhagen", "Europe/Copenhagen"),
|
||||
("Europe/Dublin", "Europe/Dublin"),
|
||||
("Europe/Gibraltar", "Europe/Gibraltar"),
|
||||
("Europe/Guernsey", "Europe/Guernsey"),
|
||||
("Europe/Helsinki", "Europe/Helsinki"),
|
||||
("Europe/Isle_of_Man", "Europe/Isle_of_Man"),
|
||||
("Europe/Istanbul", "Europe/Istanbul"),
|
||||
("Europe/Jersey", "Europe/Jersey"),
|
||||
("Europe/Kaliningrad", "Europe/Kaliningrad"),
|
||||
("Europe/Kiev", "Europe/Kiev"),
|
||||
("Europe/Kirov", "Europe/Kirov"),
|
||||
("Europe/Kyiv", "Europe/Kyiv"),
|
||||
("Europe/Lisbon", "Europe/Lisbon"),
|
||||
("Europe/Ljubljana", "Europe/Ljubljana"),
|
||||
("Europe/London", "Europe/London"),
|
||||
("Europe/Luxembourg", "Europe/Luxembourg"),
|
||||
("Europe/Madrid", "Europe/Madrid"),
|
||||
("Europe/Malta", "Europe/Malta"),
|
||||
("Europe/Mariehamn", "Europe/Mariehamn"),
|
||||
("Europe/Minsk", "Europe/Minsk"),
|
||||
("Europe/Monaco", "Europe/Monaco"),
|
||||
("Europe/Moscow", "Europe/Moscow"),
|
||||
("Europe/Nicosia", "Europe/Nicosia"),
|
||||
("Europe/Oslo", "Europe/Oslo"),
|
||||
("Europe/Paris", "Europe/Paris"),
|
||||
("Europe/Podgorica", "Europe/Podgorica"),
|
||||
("Europe/Prague", "Europe/Prague"),
|
||||
("Europe/Riga", "Europe/Riga"),
|
||||
("Europe/Rome", "Europe/Rome"),
|
||||
("Europe/Samara", "Europe/Samara"),
|
||||
("Europe/San_Marino", "Europe/San_Marino"),
|
||||
("Europe/Sarajevo", "Europe/Sarajevo"),
|
||||
("Europe/Saratov", "Europe/Saratov"),
|
||||
("Europe/Simferopol", "Europe/Simferopol"),
|
||||
("Europe/Skopje", "Europe/Skopje"),
|
||||
("Europe/Sofia", "Europe/Sofia"),
|
||||
("Europe/Stockholm", "Europe/Stockholm"),
|
||||
("Europe/Tallinn", "Europe/Tallinn"),
|
||||
("Europe/Tirane", "Europe/Tirane"),
|
||||
("Europe/Tiraspol", "Europe/Tiraspol"),
|
||||
("Europe/Ulyanovsk", "Europe/Ulyanovsk"),
|
||||
("Europe/Uzhgorod", "Europe/Uzhgorod"),
|
||||
("Europe/Vaduz", "Europe/Vaduz"),
|
||||
("Europe/Vatican", "Europe/Vatican"),
|
||||
("Europe/Vienna", "Europe/Vienna"),
|
||||
("Europe/Vilnius", "Europe/Vilnius"),
|
||||
("Europe/Volgograd", "Europe/Volgograd"),
|
||||
("Europe/Warsaw", "Europe/Warsaw"),
|
||||
("Europe/Zagreb", "Europe/Zagreb"),
|
||||
("Europe/Zaporozhye", "Europe/Zaporozhye"),
|
||||
("Europe/Zurich", "Europe/Zurich"),
|
||||
("Factory", "Factory"),
|
||||
("GB", "GB"),
|
||||
("GB-Eire", "GB-Eire"),
|
||||
("GMT", "GMT"),
|
||||
("GMT+0", "GMT+0"),
|
||||
("GMT-0", "GMT-0"),
|
||||
("GMT0", "GMT0"),
|
||||
("Greenwich", "Greenwich"),
|
||||
("HST", "HST"),
|
||||
("Hongkong", "Hongkong"),
|
||||
("Iceland", "Iceland"),
|
||||
("Indian/Antananarivo", "Indian/Antananarivo"),
|
||||
("Indian/Chagos", "Indian/Chagos"),
|
||||
("Indian/Christmas", "Indian/Christmas"),
|
||||
("Indian/Cocos", "Indian/Cocos"),
|
||||
("Indian/Comoro", "Indian/Comoro"),
|
||||
("Indian/Kerguelen", "Indian/Kerguelen"),
|
||||
("Indian/Mahe", "Indian/Mahe"),
|
||||
("Indian/Maldives", "Indian/Maldives"),
|
||||
("Indian/Mauritius", "Indian/Mauritius"),
|
||||
("Indian/Mayotte", "Indian/Mayotte"),
|
||||
("Indian/Reunion", "Indian/Reunion"),
|
||||
("Iran", "Iran"),
|
||||
("Israel", "Israel"),
|
||||
("Jamaica", "Jamaica"),
|
||||
("Japan", "Japan"),
|
||||
("Kwajalein", "Kwajalein"),
|
||||
("Libya", "Libya"),
|
||||
("MET", "MET"),
|
||||
("MST", "MST"),
|
||||
("MST7MDT", "MST7MDT"),
|
||||
("Mexico/BajaNorte", "Mexico/BajaNorte"),
|
||||
("Mexico/BajaSur", "Mexico/BajaSur"),
|
||||
("Mexico/General", "Mexico/General"),
|
||||
("NZ", "NZ"),
|
||||
("NZ-CHAT", "NZ-CHAT"),
|
||||
("Navajo", "Navajo"),
|
||||
("PRC", "PRC"),
|
||||
("PST8PDT", "PST8PDT"),
|
||||
("Pacific/Apia", "Pacific/Apia"),
|
||||
("Pacific/Auckland", "Pacific/Auckland"),
|
||||
("Pacific/Bougainville", "Pacific/Bougainville"),
|
||||
("Pacific/Chatham", "Pacific/Chatham"),
|
||||
("Pacific/Chuuk", "Pacific/Chuuk"),
|
||||
("Pacific/Easter", "Pacific/Easter"),
|
||||
("Pacific/Efate", "Pacific/Efate"),
|
||||
("Pacific/Enderbury", "Pacific/Enderbury"),
|
||||
("Pacific/Fakaofo", "Pacific/Fakaofo"),
|
||||
("Pacific/Fiji", "Pacific/Fiji"),
|
||||
("Pacific/Funafuti", "Pacific/Funafuti"),
|
||||
("Pacific/Galapagos", "Pacific/Galapagos"),
|
||||
("Pacific/Gambier", "Pacific/Gambier"),
|
||||
("Pacific/Guadalcanal", "Pacific/Guadalcanal"),
|
||||
("Pacific/Guam", "Pacific/Guam"),
|
||||
("Pacific/Honolulu", "Pacific/Honolulu"),
|
||||
("Pacific/Johnston", "Pacific/Johnston"),
|
||||
("Pacific/Kanton", "Pacific/Kanton"),
|
||||
("Pacific/Kiritimati", "Pacific/Kiritimati"),
|
||||
("Pacific/Kosrae", "Pacific/Kosrae"),
|
||||
("Pacific/Kwajalein", "Pacific/Kwajalein"),
|
||||
("Pacific/Majuro", "Pacific/Majuro"),
|
||||
("Pacific/Marquesas", "Pacific/Marquesas"),
|
||||
("Pacific/Midway", "Pacific/Midway"),
|
||||
("Pacific/Nauru", "Pacific/Nauru"),
|
||||
("Pacific/Niue", "Pacific/Niue"),
|
||||
("Pacific/Norfolk", "Pacific/Norfolk"),
|
||||
("Pacific/Noumea", "Pacific/Noumea"),
|
||||
("Pacific/Pago_Pago", "Pacific/Pago_Pago"),
|
||||
("Pacific/Palau", "Pacific/Palau"),
|
||||
("Pacific/Pitcairn", "Pacific/Pitcairn"),
|
||||
("Pacific/Pohnpei", "Pacific/Pohnpei"),
|
||||
("Pacific/Ponape", "Pacific/Ponape"),
|
||||
("Pacific/Port_Moresby", "Pacific/Port_Moresby"),
|
||||
("Pacific/Rarotonga", "Pacific/Rarotonga"),
|
||||
("Pacific/Saipan", "Pacific/Saipan"),
|
||||
("Pacific/Samoa", "Pacific/Samoa"),
|
||||
("Pacific/Tahiti", "Pacific/Tahiti"),
|
||||
("Pacific/Tarawa", "Pacific/Tarawa"),
|
||||
("Pacific/Tongatapu", "Pacific/Tongatapu"),
|
||||
("Pacific/Truk", "Pacific/Truk"),
|
||||
("Pacific/Wake", "Pacific/Wake"),
|
||||
("Pacific/Wallis", "Pacific/Wallis"),
|
||||
("Pacific/Yap", "Pacific/Yap"),
|
||||
("Poland", "Poland"),
|
||||
("Portugal", "Portugal"),
|
||||
("ROC", "ROC"),
|
||||
("ROK", "ROK"),
|
||||
("Singapore", "Singapore"),
|
||||
("Turkey", "Turkey"),
|
||||
("UCT", "UCT"),
|
||||
("US/Alaska", "US/Alaska"),
|
||||
("US/Aleutian", "US/Aleutian"),
|
||||
("US/Arizona", "US/Arizona"),
|
||||
("US/Central", "US/Central"),
|
||||
("US/East-Indiana", "US/East-Indiana"),
|
||||
("US/Eastern", "US/Eastern"),
|
||||
("US/Hawaii", "US/Hawaii"),
|
||||
("US/Indiana-Starke", "US/Indiana-Starke"),
|
||||
("US/Michigan", "US/Michigan"),
|
||||
("US/Mountain", "US/Mountain"),
|
||||
("US/Pacific", "US/Pacific"),
|
||||
("US/Samoa", "US/Samoa"),
|
||||
("UTC", "UTC"),
|
||||
("Universal", "Universal"),
|
||||
("W-SU", "W-SU"),
|
||||
("WET", "WET"),
|
||||
("Zulu", "Zulu"),
|
||||
("localtime", "localtime"),
|
||||
],
|
||||
max_length=255,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.10 on 2024-02-19 05:57
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("agents", "0058_alter_agent_time_zone"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="agenthistory",
|
||||
name="id",
|
||||
field=models.BigAutoField(primary_key=True, serialize=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,36 @@
|
||||
# Generated by Django 4.2.16 on 2024-10-05 20:39
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0047_alter_coresettings_notify_on_warning_alerts"),
|
||||
("agents", "0059_alter_agenthistory_id"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="agenthistory",
|
||||
name="collector_all_output",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="agenthistory",
|
||||
name="custom_field",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="history",
|
||||
to="core.customfield",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="agenthistory",
|
||||
name="save_to_agent_note",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,4 +1,5 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
from collections import Counter
|
||||
from contextlib import suppress
|
||||
@@ -7,7 +8,6 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union, ca
|
||||
import msgpack
|
||||
import nats
|
||||
import validators
|
||||
from asgiref.sync import sync_to_async
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.cache import cache
|
||||
@@ -20,7 +20,7 @@ from packaging.version import Version as LooseVersion
|
||||
from agents.utils import get_agent_url
|
||||
from checks.models import CheckResult
|
||||
from core.models import TZ_CHOICES
|
||||
from core.utils import get_core_settings, send_command_with_mesh
|
||||
from core.utils import _b64_to_hex, get_core_settings, send_command_with_mesh
|
||||
from logs.models import BaseAuditModel, DebugLog, PendingAction
|
||||
from tacticalrmm.constants import (
|
||||
AGENT_STATUS_OFFLINE,
|
||||
@@ -40,7 +40,7 @@ from tacticalrmm.constants import (
|
||||
PAAction,
|
||||
PAStatus,
|
||||
)
|
||||
from tacticalrmm.helpers import setup_nats_options
|
||||
from tacticalrmm.helpers import has_script_actions, has_webhook, setup_nats_options
|
||||
from tacticalrmm.models import PermissionQuerySet
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -54,6 +54,8 @@ if TYPE_CHECKING:
|
||||
# type helpers
|
||||
Disk = Union[Dict[str, Any], str]
|
||||
|
||||
logger = logging.getLogger("trmm")
|
||||
|
||||
|
||||
class Agent(BaseAuditModel):
|
||||
class Meta:
|
||||
@@ -124,6 +126,27 @@ class Agent(BaseAuditModel):
|
||||
def __str__(self) -> str:
|
||||
return self.hostname
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
# prevent recursion since calling set_alert_template() also calls save()
|
||||
if not hasattr(self, "_processing_set_alert_template"):
|
||||
self._processing_set_alert_template = False
|
||||
|
||||
if self.pk and not self._processing_set_alert_template:
|
||||
orig = Agent.objects.get(pk=self.pk)
|
||||
mon_type_changed = self.monitoring_type != orig.monitoring_type
|
||||
site_changed = self.site_id != orig.site_id
|
||||
policy_changed = self.policy != orig.policy
|
||||
block_inherit = (
|
||||
self.block_policy_inheritance != orig.block_policy_inheritance
|
||||
)
|
||||
|
||||
if mon_type_changed or site_changed or policy_changed or block_inherit:
|
||||
self._processing_set_alert_template = True
|
||||
self.set_alert_template()
|
||||
self._processing_set_alert_template = False
|
||||
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def client(self) -> "Client":
|
||||
return self.site.client
|
||||
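Reviewer note on the save() override above: the alert template is only recomputed when a field that affects template resolution changes, and the _processing_set_alert_template flag stops set_alert_template() (which itself calls save()) from recursing. A hedged usage sketch mirroring the tests added later in this diff (the lookup and imports are illustrative, not part of the change):

    agent = Agent.objects.get(agent_id="some-agent-id")  # hypothetical lookup
    agent.monitoring_type = AgentMonType.WORKSTATION
    agent.save()   # compares against the stored row, sees the change, runs set_alert_template() exactly once

    agent.hostname = "new-hostname"
    agent.save()   # no policy-relevant field changed, so set_alert_template() is skipped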
@@ -280,7 +303,20 @@ class Agent(BaseAuditModel):
|
||||
try:
|
||||
cpus = self.wmi_detail["cpu"]
|
||||
for cpu in cpus:
|
||||
ret.append([x["Name"] for x in cpu if "Name" in x][0])
|
||||
name = [x["Name"] for x in cpu if "Name" in x][0]
|
||||
lp, nc = "", ""
|
||||
with suppress(Exception):
|
||||
lp = [
|
||||
x["NumberOfLogicalProcessors"]
|
||||
for x in cpu
|
||||
if "NumberOfCores" in x
|
||||
][0]
|
||||
nc = [x["NumberOfCores"] for x in cpu if "NumberOfCores" in x][0]
|
||||
if lp and nc:
|
||||
cpu_string = f"{name}, {nc}C/{lp}T"
|
||||
else:
|
||||
cpu_string = name
|
||||
ret.append(cpu_string)
|
||||
return ret
|
||||
except:
|
||||
return ["unknown cpu model"]
|
||||
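The cpu_models change above appends core/thread counts when both values can be pulled from the WMI detail. A standalone illustration with hypothetical data shaped the way the loop expects (the logical-processor comprehension deliberately mirrors the hunk's filter on "NumberOfCores"):

    cpu = [
        {"Name": "Intel(R) Core(TM) i7-9700K"},
        {"NumberOfCores": 8, "NumberOfLogicalProcessors": 8},
    ]
    name = [x["Name"] for x in cpu if "Name" in x][0]
    nc = [x["NumberOfCores"] for x in cpu if "NumberOfCores" in x][0]
    lp = [x["NumberOfLogicalProcessors"] for x in cpu if "NumberOfCores" in x][0]
    print(f"{name}, {nc}C/{lp}T")  # -> "Intel(R) Core(TM) i7-9700K, 8C/8T"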
@@ -411,13 +447,20 @@ class Agent(BaseAuditModel):
|
||||
@property
|
||||
def serial_number(self) -> str:
|
||||
if self.is_posix:
|
||||
return ""
|
||||
try:
|
||||
return self.wmi_detail["serialnumber"]
|
||||
except:
|
||||
return ""
|
||||
|
||||
try:
|
||||
return self.wmi_detail["bios"][0][0]["SerialNumber"]
|
||||
except:
|
||||
return ""
|
||||
|
||||
@property
|
||||
def hex_mesh_node_id(self) -> str:
|
||||
return _b64_to_hex(self.mesh_node_id)
|
||||
|
||||
@classmethod
|
||||
def online_agents(cls, min_version: str = "") -> "List[Agent]":
|
||||
if min_version:
|
||||
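hex_mesh_node_id wraps the new _b64_to_hex helper imported from core.utils earlier in this diff. The real implementation is not shown here; a plausible sketch of the conversion, purely to illustrate what the property returns (assumption: mesh node ids are URL-safe base64 and the mesh API wants hex):

    import base64

    def _b64_to_hex_sketch(b64: str) -> str:
        # pad to a multiple of 4 before decoding, then render the raw bytes as hex
        return base64.urlsafe_b64decode(b64 + "=" * (-len(b64) % 4)).hex()

    print(_b64_to_hex_sketch("3q2-7w"))  # -> "deadbeef"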
@@ -505,24 +548,32 @@ class Agent(BaseAuditModel):
|
||||
)
|
||||
|
||||
return {
|
||||
"agent_policy": self.policy
|
||||
if self.policy and not self.policy.is_agent_excluded(self)
|
||||
else None,
|
||||
"site_policy": site_policy
|
||||
if (site_policy and not site_policy.is_agent_excluded(self))
|
||||
and not self.block_policy_inheritance
|
||||
else None,
|
||||
"client_policy": client_policy
|
||||
if (client_policy and not client_policy.is_agent_excluded(self))
|
||||
and not self.block_policy_inheritance
|
||||
and not self.site.block_policy_inheritance
|
||||
else None,
|
||||
"default_policy": default_policy
|
||||
if (default_policy and not default_policy.is_agent_excluded(self))
|
||||
and not self.block_policy_inheritance
|
||||
and not self.site.block_policy_inheritance
|
||||
and not self.client.block_policy_inheritance
|
||||
else None,
|
||||
"agent_policy": (
|
||||
self.policy
|
||||
if self.policy and not self.policy.is_agent_excluded(self)
|
||||
else None
|
||||
),
|
||||
"site_policy": (
|
||||
site_policy
|
||||
if (site_policy and not site_policy.is_agent_excluded(self))
|
||||
and not self.block_policy_inheritance
|
||||
else None
|
||||
),
|
||||
"client_policy": (
|
||||
client_policy
|
||||
if (client_policy and not client_policy.is_agent_excluded(self))
|
||||
and not self.block_policy_inheritance
|
||||
and not self.site.block_policy_inheritance
|
||||
else None
|
||||
),
|
||||
"default_policy": (
|
||||
default_policy
|
||||
if (default_policy and not default_policy.is_agent_excluded(self))
|
||||
and not self.block_policy_inheritance
|
||||
and not self.site.block_policy_inheritance
|
||||
and not self.client.block_policy_inheritance
|
||||
else None
|
||||
),
|
||||
}
|
||||
|
||||
def check_run_interval(self) -> int:
|
||||
@@ -568,6 +619,8 @@ class Agent(BaseAuditModel):
|
||||
},
|
||||
"run_as_user": run_as_user,
|
||||
"env_vars": parsed_env_vars,
|
||||
"nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
|
||||
"deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
|
||||
}
|
||||
|
||||
if history_pk != 0:
|
||||
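The two keys added to the run-script payload above are read straight from Django settings and forwarded to the agent. A sketch of the corresponding settings entries; the names match the attributes referenced in the hunk, the values are illustrative assumptions:

    # settings.py (illustrative values, not necessarily the project defaults)
    NUSHELL_ENABLE_CONFIG = False            # whether script runs load a nushell config
    DENO_DEFAULT_PERMISSIONS = "--allow-all"  # default permission flags for deno scripts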
@@ -798,9 +851,6 @@ class Agent(BaseAuditModel):
|
||||
cache.set(cache_key, tasks, 600)
|
||||
return tasks
|
||||
|
||||
def _do_nats_debug(self, agent: "Agent", message: str) -> None:
|
||||
DebugLog.error(agent=agent, log_type=DebugLogType.AGENT_ISSUES, message=message)
|
||||
|
||||
async def nats_cmd(
|
||||
self, data: Dict[Any, Any], timeout: int = 30, wait: bool = True
|
||||
) -> Any:
|
||||
@@ -822,9 +872,7 @@ class Agent(BaseAuditModel):
|
||||
ret = msgpack.loads(msg.data)
|
||||
except Exception as e:
|
||||
ret = str(e)
|
||||
await sync_to_async(self._do_nats_debug, thread_sensitive=False)(
|
||||
agent=self, message=ret
|
||||
)
|
||||
logger.error(e)
|
||||
|
||||
await nc.close()
|
||||
return ret
|
||||
@@ -907,18 +955,22 @@ class Agent(BaseAuditModel):
|
||||
def should_create_alert(
|
||||
self, alert_template: "Optional[AlertTemplate]" = None
|
||||
) -> bool:
|
||||
return bool(
|
||||
has_agent_notification = (
|
||||
self.overdue_dashboard_alert
|
||||
or self.overdue_email_alert
|
||||
or self.overdue_text_alert
|
||||
or (
|
||||
alert_template
|
||||
and (
|
||||
alert_template.agent_always_alert
|
||||
or alert_template.agent_always_email
|
||||
or alert_template.agent_always_text
|
||||
)
|
||||
)
|
||||
)
|
||||
has_alert_template_notification = alert_template and (
|
||||
alert_template.agent_always_alert
|
||||
or alert_template.agent_always_email
|
||||
or alert_template.agent_always_text
|
||||
)
|
||||
|
||||
return bool(
|
||||
has_agent_notification
|
||||
or has_alert_template_notification
|
||||
or has_webhook(alert_template, "agent")
|
||||
or has_script_actions(alert_template, "agent")
|
||||
)
|
||||
|
||||
def send_outage_email(self) -> None:
|
||||
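With the refactor above, should_create_alert() is no longer driven only by per-agent overdue flags and "always alert" template settings; webhook and script-action configuration on the template now also force an alert. An illustrative call pattern (assuming has_webhook/has_script_actions return falsy for a missing template):

    agent.overdue_dashboard_alert = False
    agent.overdue_email_alert = False
    agent.overdue_text_alert = False
    agent.should_create_alert(alert_template=None)       # -> False
    agent.should_create_alert(alert_template=template)   # -> True if the template always alerts,
                                                          #    or has a webhook / script action configured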
@@ -1047,6 +1099,7 @@ class AgentCustomField(models.Model):
|
||||
class AgentHistory(models.Model):
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
agent = models.ForeignKey(
|
||||
Agent,
|
||||
related_name="history",
|
||||
@@ -1069,6 +1122,15 @@ class AgentHistory(models.Model):
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
script_results = models.JSONField(null=True, blank=True)
|
||||
custom_field = models.ForeignKey(
|
||||
"core.CustomField",
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="history",
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
collector_all_output = models.BooleanField(default=False)
|
||||
save_to_agent_note = models.BooleanField(default=False)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{self.agent.hostname} - {self.type}"
|
||||
|
||||
@@ -47,13 +47,6 @@ class UpdateAgentPerms(permissions.BasePermission):
|
||||
return _has_perm(r, "can_update_agents")
|
||||
|
||||
|
||||
class PingAgentPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
return _has_perm(r, "can_ping_agents") and _has_perm_on_agent(
|
||||
r.user, view.kwargs["agent_id"]
|
||||
)
|
||||
|
||||
|
||||
class ManageProcPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
return _has_perm(r, "can_manage_procs") and _has_perm_on_agent(
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import pytz
|
||||
from rest_framework import serializers
|
||||
|
||||
from tacticalrmm.constants import AGENT_STATUS_ONLINE
|
||||
from tacticalrmm.constants import AGENT_STATUS_ONLINE, ALL_TIMEZONES
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .models import Agent, AgentCustomField, AgentHistory, Note
|
||||
@@ -71,7 +70,7 @@ class AgentSerializer(serializers.ModelSerializer):
|
||||
return policies
|
||||
|
||||
def get_all_timezones(self, obj):
|
||||
return pytz.all_timezones
|
||||
return ALL_TIMEZONES
|
||||
|
||||
class Meta:
|
||||
model = Agent
|
||||
|
||||
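get_all_timezones now returns the ALL_TIMEZONES constant from tacticalrmm.constants instead of calling pytz at request time. A likely-equivalent definition using only the standard library, shown as an assumption rather than the project's actual code:

    import zoneinfo

    ALL_TIMEZONES = sorted(zoneinfo.available_timezones())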
@@ -175,7 +175,7 @@ def run_script_email_results_task(
|
||||
return
|
||||
|
||||
CORE = get_core_settings()
|
||||
subject = f"{agent.hostname} {script.name} Results"
|
||||
subject = f"{agent.client.name}, {agent.site.name}, {agent.hostname} {script.name} Results"
|
||||
exec_time = "{:.4f}".format(r["execution_time"])
|
||||
body = (
|
||||
subject
|
||||
|
||||
api/tacticalrmm/agents/tests/test_agent_save.py (new file, 61 lines)
api/tacticalrmm/agents/tests/test_agent_save.py (new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from model_bakery import baker
|
||||
|
||||
from agents.models import Agent
|
||||
from tacticalrmm.constants import AgentMonType
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
|
||||
class AgentSaveTestCase(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.client1 = baker.make("clients.Client")
|
||||
self.client2 = baker.make("clients.Client")
|
||||
self.site1 = baker.make("clients.Site", client=self.client1)
|
||||
self.site2 = baker.make("clients.Site", client=self.client2)
|
||||
self.site3 = baker.make("clients.Site", client=self.client2)
|
||||
self.agent = baker.make(
|
||||
"agents.Agent",
|
||||
site=self.site1,
|
||||
monitoring_type=AgentMonType.SERVER,
|
||||
)
|
||||
|
||||
@patch.object(Agent, "set_alert_template")
|
||||
def test_set_alert_template_called_on_mon_type_change(
|
||||
self, mock_set_alert_template
|
||||
):
|
||||
self.agent.monitoring_type = AgentMonType.WORKSTATION
|
||||
self.agent.save()
|
||||
mock_set_alert_template.assert_called_once()
|
||||
|
||||
@patch.object(Agent, "set_alert_template")
|
||||
def test_set_alert_template_called_on_site_change(self, mock_set_alert_template):
|
||||
self.agent.site = self.site2
|
||||
self.agent.save()
|
||||
mock_set_alert_template.assert_called_once()
|
||||
|
||||
@patch.object(Agent, "set_alert_template")
|
||||
def test_set_alert_template_called_on_site_and_montype_change(
|
||||
self, mock_set_alert_template
|
||||
):
|
||||
print(f"before: {self.agent.monitoring_type} site: {self.agent.site_id}")
|
||||
self.agent.site = self.site3
|
||||
self.agent.monitoring_type = AgentMonType.WORKSTATION
|
||||
self.agent.save()
|
||||
mock_set_alert_template.assert_called_once()
|
||||
print(f"after: {self.agent.monitoring_type} site: {self.agent.site_id}")
|
||||
|
||||
@patch.object(Agent, "set_alert_template")
|
||||
def test_set_alert_template_not_called_without_changes(
|
||||
self, mock_set_alert_template
|
||||
):
|
||||
self.agent.save()
|
||||
mock_set_alert_template.assert_not_called()
|
||||
|
||||
@patch.object(Agent, "set_alert_template")
|
||||
def test_set_alert_template_not_called_on_non_relevant_field_change(
|
||||
self, mock_set_alert_template
|
||||
):
|
||||
self.agent.hostname = "abc123"
|
||||
self.agent.save()
|
||||
mock_set_alert_template.assert_not_called()
|
||||
@@ -2,7 +2,7 @@ import json
|
||||
import os
|
||||
from itertools import cycle
|
||||
from typing import TYPE_CHECKING
|
||||
from unittest.mock import patch
|
||||
from unittest.mock import PropertyMock, patch
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from django.conf import settings
|
||||
@@ -768,6 +768,67 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
self.assertEqual(Note.objects.get(agent=self.agent).note, "ok")
|
||||
|
||||
# test run on server
|
||||
with patch("core.utils.run_server_script") as mock_run_server_script:
|
||||
mock_run_server_script.return_value = ("output", "error", 1.23456789, 0)
|
||||
data = {
|
||||
"script": script.pk,
|
||||
"output": "wait",
|
||||
"args": ["arg1", "arg2"],
|
||||
"timeout": 15,
|
||||
"run_as_user": False,
|
||||
"env_vars": ["key1=val1", "key2=val2"],
|
||||
"run_on_server": True,
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
hist = AgentHistory.objects.filter(agent=self.agent, script=script).last()
|
||||
if not hist:
|
||||
raise AgentHistory.DoesNotExist
|
||||
|
||||
mock_run_server_script.assert_called_with(
|
||||
body=script.script_body,
|
||||
args=script.parse_script_args(self.agent, script.shell, data["args"]),
|
||||
env_vars=script.parse_script_env_vars(
|
||||
self.agent, script.shell, data["env_vars"]
|
||||
),
|
||||
shell=script.shell,
|
||||
timeout=18,
|
||||
)
|
||||
|
||||
expected_ret = {
|
||||
"stdout": "output",
|
||||
"stderr": "error",
|
||||
"execution_time": "1.2346",
|
||||
"retcode": 0,
|
||||
}
|
||||
|
||||
self.assertEqual(r.data, expected_ret)
|
||||
|
||||
hist.refresh_from_db()
|
||||
expected_script_results = {**expected_ret, "id": hist.pk}
|
||||
self.assertEqual(hist.script_results, expected_script_results)
|
||||
|
||||
# test run on server with server scripts disabled
|
||||
with patch(
|
||||
"core.models.CoreSettings.server_scripts_enabled",
|
||||
new_callable=PropertyMock,
|
||||
) as server_scripts_enabled:
|
||||
server_scripts_enabled.return_value = False
|
||||
|
||||
data = {
|
||||
"script": script.pk,
|
||||
"output": "wait",
|
||||
"args": ["arg1", "arg2"],
|
||||
"timeout": 15,
|
||||
"run_as_user": False,
|
||||
"env_vars": ["key1=val1", "key2=val2"],
|
||||
"run_on_server": True,
|
||||
}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
def test_get_notes(self):
|
||||
url = f"{base_url}/notes/"
|
||||
|
||||
@@ -1020,7 +1081,6 @@ class TestAgentPermissions(TacticalTestCase):
|
||||
{"method": "post", "action": "recover", "role": "can_recover_agents"},
|
||||
{"method": "post", "action": "reboot", "role": "can_reboot_agents"},
|
||||
{"method": "patch", "action": "reboot", "role": "can_reboot_agents"},
|
||||
{"method": "get", "action": "ping", "role": "can_ping_agents"},
|
||||
{"method": "get", "action": "meshcentral", "role": "can_use_mesh"},
|
||||
{"method": "post", "action": "meshcentral/recover", "role": "can_use_mesh"},
|
||||
{"method": "get", "action": "processes", "role": "can_manage_procs"},
|
||||
|
||||
@@ -15,6 +15,7 @@ urlpatterns = [
|
||||
path("<agent:agent_id>/wmi/", views.WMI.as_view()),
|
||||
path("<agent:agent_id>/recover/", views.recover),
|
||||
path("<agent:agent_id>/reboot/", views.Reboot.as_view()),
|
||||
path("<agent:agent_id>/shutdown/", views.Shutdown.as_view()),
|
||||
path("<agent:agent_id>/ping/", views.ping),
|
||||
# alias for checks get view
|
||||
path("<agent:agent_id>/checks/", GetAddChecks.as_view()),
|
||||
|
||||
@@ -21,6 +21,7 @@ from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from core.tasks import sync_mesh_perms_task
|
||||
from core.utils import (
|
||||
get_core_settings,
|
||||
get_mesh_ws_url,
|
||||
@@ -65,7 +66,6 @@ from .permissions import (
|
||||
InstallAgentPerms,
|
||||
ManageProcPerms,
|
||||
MeshPerms,
|
||||
PingAgentPerms,
|
||||
RebootAgentPerms,
|
||||
RecoverAgentPerms,
|
||||
RunBulkPerms,
|
||||
@@ -259,6 +259,7 @@ class GetUpdateDeleteAgent(APIView):
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response("The agent was updated successfully")
|
||||
|
||||
# uninstall agent
|
||||
@@ -284,6 +285,7 @@ class GetUpdateDeleteAgent(APIView):
|
||||
message=f"Unable to remove agent {name} from meshcentral database: {e}",
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
)
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response(f"{name} will now be uninstalled.")
|
||||
|
||||
|
||||
@@ -326,13 +328,13 @@ class AgentMeshCentral(APIView):
|
||||
agent = get_object_or_404(Agent, agent_id=agent_id)
|
||||
core = get_core_settings()
|
||||
|
||||
if not core.mesh_disable_auto_login:
|
||||
token = get_login_token(
|
||||
key=core.mesh_token, user=f"user//{core.mesh_username}"
|
||||
)
|
||||
token_param = f"login={token}&"
|
||||
else:
|
||||
token_param = ""
|
||||
user = (
|
||||
request.user.mesh_user_id
|
||||
if core.sync_mesh_with_trmm
|
||||
else f"user//{core.mesh_api_superuser}"
|
||||
)
|
||||
token = get_login_token(key=core.mesh_token, user=user)
|
||||
token_param = f"login={token}&"
|
||||
|
||||
control = f"{core.mesh_site}/?{token_param}gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
|
||||
terminal = f"{core.mesh_site}/?{token_param}gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
|
||||
@@ -402,7 +404,7 @@ def update_agents(request):
|
||||
|
||||
|
||||
@api_view(["GET"])
|
||||
@permission_classes([IsAuthenticated, PingAgentPerms])
|
||||
@permission_classes([IsAuthenticated, AgentPerms])
|
||||
def ping(request, agent_id):
|
||||
agent = get_object_or_404(Agent, agent_id=agent_id)
|
||||
status = AGENT_STATUS_OFFLINE
|
||||
@@ -492,6 +494,19 @@ def send_raw_cmd(request, agent_id):
|
||||
return Response(r)
|
||||
|
||||
|
||||
class Shutdown(APIView):
|
||||
permission_classes = [IsAuthenticated, RebootAgentPerms]
|
||||
|
||||
# shutdown
|
||||
def post(self, request, agent_id):
|
||||
agent = get_object_or_404(Agent, agent_id=agent_id)
|
||||
r = asyncio.run(agent.nats_cmd({"func": "shutdown"}, timeout=10))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class Reboot(APIView):
|
||||
permission_classes = [IsAuthenticated, RebootAgentPerms]
|
||||
|
||||
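The new Shutdown view mirrors Reboot and reuses RebootAgentPerms; the matching "<agent:agent_id>/shutdown/" route was added in the urls hunk above. A hedged example of exercising it from the DRF test client (base path assumed from the existing agents tests):

    r = self.client.post(f"/agents/{agent.agent_id}/shutdown/", format="json")
    # 200 "ok" when the agent acks the nats "shutdown" command within 10s,
    # otherwise a 400 "Unable to contact the agent"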
@@ -753,6 +768,10 @@ def run_script(request, agent_id):
|
||||
run_as_user: bool = request.data["run_as_user"]
|
||||
env_vars: list[str] = request.data["env_vars"]
|
||||
req_timeout = int(request.data["timeout"]) + 3
|
||||
run_on_server: bool | None = request.data.get("run_on_server")
|
||||
|
||||
if run_on_server and not get_core_settings().server_scripts_enabled:
|
||||
return notify_error("This feature is disabled.")
|
||||
|
||||
AuditLog.audit_script_run(
|
||||
username=request.user.username,
|
||||
@@ -769,6 +788,29 @@ def run_script(request, agent_id):
|
||||
)
|
||||
history_pk = hist.pk
|
||||
|
||||
if run_on_server:
|
||||
from core.utils import run_server_script
|
||||
|
||||
r = run_server_script(
|
||||
body=script.script_body,
|
||||
args=script.parse_script_args(agent, script.shell, args),
|
||||
env_vars=script.parse_script_env_vars(agent, script.shell, env_vars),
|
||||
shell=script.shell,
|
||||
timeout=req_timeout,
|
||||
)
|
||||
|
||||
ret = {
|
||||
"stdout": r[0],
|
||||
"stderr": r[1],
|
||||
"execution_time": "{:.4f}".format(r[2]),
|
||||
"retcode": r[3],
|
||||
}
|
||||
|
||||
hist.script_results = {**ret, "id": history_pk}
|
||||
hist.save(update_fields=["script_results"])
|
||||
|
||||
return Response(ret)
|
||||
|
||||
if output == "wait":
|
||||
r = agent.run_script(
|
||||
scriptpk=script.pk,
|
||||
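The run_on_server branch above bypasses the agent entirely, executes the script on the RMM server via core.utils.run_server_script, and stores the normalized results on the AgentHistory row. The request shape matches the payload used in the new view test earlier in this diff:

    payload = {
        "script": script.pk,
        "output": "wait",
        "args": ["arg1", "arg2"],
        "timeout": 15,
        "run_as_user": False,
        "env_vars": ["key1=val1", "key2=val2"],
        "run_on_server": True,  # rejected with a 400 unless server scripts are enabled in core settings
    }
    # POSTed to the agent's run_script endpoint (URL omitted here)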
@@ -972,6 +1014,8 @@ def bulk(request):
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
ht = "Check the History tab on the agent to view the results."
|
||||
|
||||
if request.data["mode"] == "command":
|
||||
if request.data["shell"] == "custom" and request.data["custom_shell"]:
|
||||
shell = request.data["custom_shell"]
|
||||
@@ -986,11 +1030,21 @@ def bulk(request):
|
||||
username=request.user.username[:50],
|
||||
run_as_user=request.data["run_as_user"],
|
||||
)
|
||||
return Response(f"Command will now be run on {len(agents)} agents")
|
||||
return Response(f"Command will now be run on {len(agents)} agents. {ht}")
|
||||
|
||||
elif request.data["mode"] == "script":
|
||||
script = get_object_or_404(Script, pk=request.data["script"])
|
||||
|
||||
# prevent API from breaking for those who haven't updated payload
|
||||
try:
|
||||
custom_field_pk = request.data["custom_field"]
|
||||
collector_all_output = request.data["collector_all_output"]
|
||||
save_to_agent_note = request.data["save_to_agent_note"]
|
||||
except KeyError:
|
||||
custom_field_pk = None
|
||||
collector_all_output = False
|
||||
save_to_agent_note = False
|
||||
|
||||
bulk_script_task.delay(
|
||||
script_pk=script.pk,
|
||||
agent_pks=agents,
|
||||
@@ -999,9 +1053,12 @@ def bulk(request):
|
||||
username=request.user.username[:50],
|
||||
run_as_user=request.data["run_as_user"],
|
||||
env_vars=request.data["env_vars"],
|
||||
custom_field_pk=custom_field_pk,
|
||||
collector_all_output=collector_all_output,
|
||||
save_to_agent_note=save_to_agent_note,
|
||||
)
|
||||
|
||||
return Response(f"{script.name} will now be run on {len(agents)} agents")
|
||||
return Response(f"{script.name} will now be run on {len(agents)} agents. {ht}")
|
||||
|
||||
elif request.data["mode"] == "patch":
|
||||
if request.data["patchMode"] == "install":
|
||||
|
||||
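The bulk endpoint now accepts collector-style options for script mode and falls back to safe defaults when an older payload omits them. An illustrative request body (field names taken from the hunk, values hypothetical, target-selection keys unchanged and omitted):

    data = {
        "mode": "script",
        "script": script.pk,
        "timeout": 60,
        "run_as_user": False,
        "env_vars": [],
        "custom_field": custom_field.pk,   # optional; forwarded to bulk_script_task
        "collector_all_output": False,     # optional; forwarded to bulk_script_task
        "save_to_agent_note": True,        # optional; forwarded to bulk_script_task
    }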
@@ -0,0 +1,55 @@
|
||||
# Generated by Django 4.2.13 on 2024-06-28 20:21
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0045_coresettings_enable_server_scripts_and_more"),
|
||||
("alerts", "0013_alerttemplate_action_env_vars_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="alerttemplate",
|
||||
name="action_rest",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="url_action_alert_template",
|
||||
to="core.urlaction",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="alerttemplate",
|
||||
name="action_type",
|
||||
field=models.CharField(
|
||||
choices=[("script", "Script"), ("server", "Server"), ("rest", "Rest")],
|
||||
default="script",
|
||||
max_length=10,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="alerttemplate",
|
||||
name="resolved_action_rest",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="resolved_url_action_alert_template",
|
||||
to="core.urlaction",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="alerttemplate",
|
||||
name="resolved_action_type",
|
||||
field=models.CharField(
|
||||
choices=[("script", "Script"), ("server", "Server"), ("rest", "Rest")],
|
||||
default="script",
|
||||
max_length=10,
|
||||
),
|
||||
),
|
||||
]
|
||||
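The choices on action_type / resolved_action_type in this migration correspond to the AlertTemplateActionType constant imported from tacticalrmm.constants in the models hunks below. A sketch of an enum that would produce exactly these migration choices; the real definition lives in tacticalrmm/constants.py:

    from django.db import models

    class AlertTemplateActionType(models.TextChoices):
        SCRIPT = "script", "Script"
        SERVER = "server", "Server"
        REST = "rest", "Rest"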
@@ -1,6 +1,5 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast
|
||||
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
@@ -8,16 +7,20 @@ from django.db import models
|
||||
from django.db.models.fields import BooleanField, PositiveIntegerField
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from core.utils import run_server_script, run_url_rest_action
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
from tacticalrmm.constants import (
|
||||
AgentHistoryType,
|
||||
AgentMonType,
|
||||
AlertSeverity,
|
||||
AlertTemplateActionType,
|
||||
AlertType,
|
||||
CheckType,
|
||||
DebugLogType,
|
||||
)
|
||||
from tacticalrmm.logger import logger
|
||||
from tacticalrmm.models import PermissionQuerySet
|
||||
from tacticalrmm.utils import RE_DB_VALUE, get_db_value
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from agents.models import Agent
|
||||
@@ -95,6 +98,15 @@ class Alert(models.Model):
|
||||
def client(self) -> "Client":
|
||||
return self.agent.client
|
||||
|
||||
@property
|
||||
def get_result(self):
|
||||
if self.alert_type == AlertType.CHECK:
|
||||
return self.assigned_check.checkresults.get(agent=self.agent)
|
||||
elif self.alert_type == AlertType.TASK:
|
||||
return self.assigned_task.taskresults.get(agent=self.agent)
|
||||
|
||||
return None
|
||||
|
||||
def resolve(self) -> None:
|
||||
self.resolved = True
|
||||
self.resolved_on = djangotime.now()
|
||||
@@ -106,6 +118,9 @@ class Alert(models.Model):
|
||||
def create_or_return_availability_alert(
|
||||
cls, agent: Agent, skip_create: bool = False
|
||||
) -> Optional[Alert]:
|
||||
if agent.maintenance_mode:
|
||||
return None
|
||||
|
||||
if not cls.objects.filter(
|
||||
agent=agent, alert_type=AlertType.AVAILABILITY, resolved=False
|
||||
).exists():
|
||||
@@ -118,7 +133,7 @@ class Alert(models.Model):
|
||||
agent=agent,
|
||||
alert_type=AlertType.AVAILABILITY,
|
||||
severity=AlertSeverity.ERROR,
|
||||
message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.",
|
||||
message=f"{agent.hostname} in {agent.client.name}, {agent.site.name} is overdue.",
|
||||
hidden=True,
|
||||
),
|
||||
)
|
||||
@@ -154,6 +169,9 @@ class Alert(models.Model):
|
||||
alert_severity: Optional[str] = None,
|
||||
skip_create: bool = False,
|
||||
) -> "Optional[Alert]":
|
||||
if agent.maintenance_mode:
|
||||
return None
|
||||
|
||||
# need to pass agent if the check is a policy
|
||||
if not cls.objects.filter(
|
||||
assigned_check=check,
|
||||
@@ -169,15 +187,17 @@ class Alert(models.Model):
|
||||
assigned_check=check,
|
||||
agent=agent,
|
||||
alert_type=AlertType.CHECK,
|
||||
severity=check.alert_severity
|
||||
if check.check_type
|
||||
not in {
|
||||
CheckType.MEMORY,
|
||||
CheckType.CPU_LOAD,
|
||||
CheckType.DISK_SPACE,
|
||||
CheckType.SCRIPT,
|
||||
}
|
||||
else alert_severity,
|
||||
severity=(
|
||||
check.alert_severity
|
||||
if check.check_type
|
||||
not in {
|
||||
CheckType.MEMORY,
|
||||
CheckType.CPU_LOAD,
|
||||
CheckType.DISK_SPACE,
|
||||
CheckType.SCRIPT,
|
||||
}
|
||||
else alert_severity
|
||||
),
|
||||
message=f"{agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
|
||||
hidden=True,
|
||||
),
|
||||
@@ -216,6 +236,9 @@ class Alert(models.Model):
|
||||
agent: "Agent",
|
||||
skip_create: bool = False,
|
||||
) -> "Optional[Alert]":
|
||||
if agent.maintenance_mode:
|
||||
return None
|
||||
|
||||
if not cls.objects.filter(
|
||||
assigned_task=task,
|
||||
agent=agent,
|
||||
@@ -270,7 +293,9 @@ class Alert(models.Model):
|
||||
from agents.models import Agent, AgentHistory
|
||||
from autotasks.models import TaskResult
|
||||
from checks.models import CheckResult
|
||||
from core.models import CoreSettings
|
||||
|
||||
core = CoreSettings.objects.first()
|
||||
# set variables
|
||||
dashboard_severities = None
|
||||
email_severities = None
|
||||
@@ -281,7 +306,7 @@ class Alert(models.Model):
|
||||
alert_interval = None
|
||||
email_task = None
|
||||
text_task = None
|
||||
run_script_action = None
|
||||
should_run_script_or_webhook = False
|
||||
|
||||
# check what the instance passed is
|
||||
if isinstance(instance, Agent):
|
||||
@@ -307,7 +332,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.agent_always_email
|
||||
always_text = alert_template.agent_always_text
|
||||
alert_interval = alert_template.agent_periodic_alert_days
|
||||
run_script_action = alert_template.agent_script_actions
|
||||
should_run_script_or_webhook = alert_template.agent_script_actions
|
||||
|
||||
elif isinstance(instance, CheckResult):
|
||||
from checks.tasks import (
|
||||
@@ -358,7 +383,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.check_always_email
|
||||
always_text = alert_template.check_always_text
|
||||
alert_interval = alert_template.check_periodic_alert_days
|
||||
run_script_action = alert_template.check_script_actions
|
||||
should_run_script_or_webhook = alert_template.check_script_actions
|
||||
|
||||
elif isinstance(instance, TaskResult):
|
||||
from autotasks.tasks import handle_task_email_alert, handle_task_sms_alert
|
||||
@@ -392,7 +417,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.task_always_email
|
||||
always_text = alert_template.task_always_text
|
||||
alert_interval = alert_template.task_periodic_alert_days
|
||||
run_script_action = alert_template.task_script_actions
|
||||
should_run_script_or_webhook = alert_template.task_script_actions
|
||||
|
||||
else:
|
||||
return
|
||||
@@ -420,12 +445,23 @@ class Alert(models.Model):
|
||||
alert.hidden = False
|
||||
alert.save(update_fields=["hidden"])
|
||||
|
||||
# TODO rework this
|
||||
if alert.severity == AlertSeverity.INFO and not core.notify_on_info_alerts:
|
||||
email_alert = False
|
||||
always_email = False
|
||||
|
||||
elif (
|
||||
alert.severity == AlertSeverity.WARNING
|
||||
and not core.notify_on_warning_alerts
|
||||
):
|
||||
email_alert = False
|
||||
always_email = False
|
||||
|
||||
# send email if enabled
|
||||
if email_alert or always_email:
|
||||
# check if alert template is set and specific severities are configured
|
||||
if (
|
||||
not alert_template
|
||||
or alert_template
|
||||
if not alert_template or (
|
||||
alert_template
|
||||
and email_severities
|
||||
and alert.severity in email_severities
|
||||
):
|
||||
@@ -434,41 +470,89 @@ class Alert(models.Model):
|
||||
alert_interval=alert_interval,
|
||||
)
|
||||
|
||||
# TODO rework this
|
||||
if alert.severity == AlertSeverity.INFO and not core.notify_on_info_alerts:
|
||||
text_alert = False
|
||||
always_text = False
|
||||
elif (
|
||||
alert.severity == AlertSeverity.WARNING
|
||||
and not core.notify_on_warning_alerts
|
||||
):
|
||||
text_alert = False
|
||||
always_text = False
|
||||
|
||||
# send text if enabled
|
||||
if text_alert or always_text:
|
||||
# check if alert template is set and specific severities are configured
|
||||
if (
|
||||
not alert_template
|
||||
or alert_template
|
||||
and text_severities
|
||||
and alert.severity in text_severities
|
||||
if not alert_template or (
|
||||
alert_template and text_severities and alert.severity in text_severities
|
||||
):
|
||||
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
|
||||
|
||||
# check if any scripts should be run
|
||||
if (
|
||||
alert_template
|
||||
and alert_template.action
|
||||
and run_script_action
|
||||
and not alert.action_run
|
||||
):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type=AgentHistoryType.SCRIPT_RUN,
|
||||
script=alert_template.action,
|
||||
username="alert-action-failure",
|
||||
)
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.action.pk,
|
||||
args=alert.parse_script_args(alert_template.action_args),
|
||||
timeout=alert_template.action_timeout,
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
run_as_user=False,
|
||||
env_vars=alert_template.action_env_vars,
|
||||
)
|
||||
# check if any scripts/webhooks should be run
|
||||
if alert_template and not alert.action_run and should_run_script_or_webhook:
|
||||
if (
|
||||
alert_template.action_type == AlertTemplateActionType.SCRIPT
|
||||
and alert_template.action
|
||||
):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type=AgentHistoryType.SCRIPT_RUN,
|
||||
script=alert_template.action,
|
||||
username="alert-action-failure",
|
||||
)
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.action.pk,
|
||||
args=alert.parse_script_args(alert_template.action_args),
|
||||
timeout=alert_template.action_timeout,
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
run_as_user=False,
|
||||
env_vars=alert.parse_script_args(alert_template.action_env_vars),
|
||||
)
|
||||
elif (
|
||||
alert_template.action_type == AlertTemplateActionType.SERVER
|
||||
and alert_template.action
|
||||
):
|
||||
stdout, stderr, execution_time, retcode = run_server_script(
|
||||
body=alert_template.action.script_body,
|
||||
args=alert.parse_script_args(alert_template.action_args),
|
||||
timeout=alert_template.action_timeout,
|
||||
env_vars=alert.parse_script_args(alert_template.action_env_vars),
|
||||
shell=alert_template.action.shell,
|
||||
)
|
||||
|
||||
r = {
|
||||
"retcode": retcode,
|
||||
"stdout": stdout,
|
||||
"stderr": stderr,
|
||||
"execution_time": execution_time,
|
||||
}
|
||||
|
||||
elif alert_template.action_type == AlertTemplateActionType.REST:
|
||||
if (
|
||||
alert.severity == AlertSeverity.INFO
|
||||
and not core.notify_on_info_alerts
|
||||
or alert.severity == AlertSeverity.WARNING
|
||||
and not core.notify_on_warning_alerts
|
||||
):
|
||||
return
|
||||
else:
|
||||
output, status = run_url_rest_action(
|
||||
action_id=alert_template.action_rest.id, instance=alert
|
||||
)
|
||||
logger.debug(f"{output=} {status=}")
|
||||
|
||||
r = {
|
||||
"stdout": output,
|
||||
"stderr": "",
|
||||
"execution_time": 0,
|
||||
"retcode": status,
|
||||
}
|
||||
else:
|
||||
return
|
||||
|
||||
# command was successful
|
||||
if isinstance(r, dict):
|
||||
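Whatever the action type, the failure handler above normalizes the outcome into the same dict shape (stdout/stderr/execution_time/retcode) before recording action_run, so the downstream success check stays identical. For the REST branch a webhook response is folded in like this (values illustrative):

    output, status = '{"ok": true}', 200  # as returned by run_url_rest_action(...)
    r = {"stdout": output, "stderr": "", "execution_time": 0, "retcode": status}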
@@ -479,11 +563,17 @@ class Alert(models.Model):
|
||||
alert.action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.SCRIPTING,
|
||||
message=f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) failure alert",
|
||||
)
|
||||
if alert_template.action_type == AlertTemplateActionType.SCRIPT:
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.SCRIPTING,
|
||||
message=f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) failure alert",
|
||||
)
|
||||
else:
|
||||
DebugLog.error(
|
||||
log_type=DebugLogType.SCRIPTING,
|
||||
message=f"Failure action: {alert_template.action.name} failed to run on server for failure alert",
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def handle_alert_resolve(
|
||||
@@ -492,13 +582,18 @@ class Alert(models.Model):
|
||||
from agents.models import Agent, AgentHistory
|
||||
from autotasks.models import TaskResult
|
||||
from checks.models import CheckResult
|
||||
from core.models import CoreSettings
|
||||
|
||||
core = CoreSettings.objects.first()
|
||||
|
||||
# set variables
|
||||
email_severities = None
|
||||
text_severities = None
|
||||
email_on_resolved = False
|
||||
text_on_resolved = False
|
||||
resolved_email_task = None
|
||||
resolved_text_task = None
|
||||
run_script_action = None
|
||||
should_run_script_or_webhook = False
|
||||
|
||||
# check what the instance passed is
|
||||
if isinstance(instance, Agent):
|
||||
@@ -514,7 +609,9 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.agent_email_on_resolved
|
||||
text_on_resolved = alert_template.agent_text_on_resolved
|
||||
run_script_action = alert_template.agent_script_actions
|
||||
should_run_script_or_webhook = alert_template.agent_script_actions
|
||||
email_severities = [AlertSeverity.ERROR]
|
||||
text_severities = [AlertSeverity.ERROR]
|
||||
|
||||
if agent.overdue_email_alert:
|
||||
email_on_resolved = True
|
||||
@@ -537,7 +634,15 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.check_email_on_resolved
|
||||
text_on_resolved = alert_template.check_text_on_resolved
|
||||
run_script_action = alert_template.check_script_actions
|
||||
should_run_script_or_webhook = alert_template.check_script_actions
|
||||
email_severities = alert_template.check_email_alert_severity or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
]
|
||||
text_severities = alert_template.check_text_alert_severity or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
]
|
||||
|
||||
elif isinstance(instance, TaskResult):
|
||||
from autotasks.tasks import (
|
||||
@@ -555,7 +660,15 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.task_email_on_resolved
|
||||
text_on_resolved = alert_template.task_text_on_resolved
|
||||
run_script_action = alert_template.task_script_actions
|
||||
should_run_script_or_webhook = alert_template.task_script_actions
|
||||
email_severities = alert_template.task_email_alert_severity or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
]
|
||||
text_severities = alert_template.task_text_alert_severity or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
]
|
||||
|
||||
else:
|
||||
return
|
||||
@@ -570,36 +683,103 @@ class Alert(models.Model):
|
||||
|
||||
# check if a resolved email notification should be send
|
||||
if email_on_resolved and not alert.resolved_email_sent:
|
||||
resolved_email_task.delay(pk=alert.pk)
|
||||
if alert.severity == AlertSeverity.INFO and not core.notify_on_info_alerts:
|
||||
pass
|
||||
|
||||
elif (
|
||||
alert.severity == AlertSeverity.WARNING
|
||||
and not core.notify_on_warning_alerts
|
||||
):
|
||||
pass
|
||||
elif email_severities and alert.severity not in email_severities:
|
||||
pass
|
||||
else:
|
||||
resolved_email_task.delay(pk=alert.pk)
|
||||
|
||||
# check if resolved text should be sent
|
||||
if text_on_resolved and not alert.resolved_sms_sent:
|
||||
resolved_text_task.delay(pk=alert.pk)
|
||||
if alert.severity == AlertSeverity.INFO and not core.notify_on_info_alerts:
|
||||
pass
|
||||
|
||||
# check if resolved script should be run
|
||||
elif (
|
||||
alert.severity == AlertSeverity.WARNING
|
||||
and not core.notify_on_warning_alerts
|
||||
):
|
||||
pass
|
||||
elif text_severities and alert.severity not in text_severities:
|
||||
pass
|
||||
else:
|
||||
resolved_text_task.delay(pk=alert.pk)
|
||||
|
||||
# check if resolved script/webhook should be run
|
||||
if (
|
||||
alert_template
|
||||
and alert_template.resolved_action
|
||||
and run_script_action
|
||||
and not alert.resolved_action_run
|
||||
and should_run_script_or_webhook
|
||||
):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type=AgentHistoryType.SCRIPT_RUN,
|
||||
script=alert_template.action,
|
||||
username="alert-action-resolved",
|
||||
)
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.resolved_action.pk,
|
||||
args=alert.parse_script_args(alert_template.resolved_action_args),
|
||||
timeout=alert_template.resolved_action_timeout,
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
run_as_user=False,
|
||||
env_vars=alert_template.resolved_action_env_vars,
|
||||
)
|
||||
if (
|
||||
alert_template.resolved_action_type == AlertTemplateActionType.SCRIPT
|
||||
and alert_template.resolved_action
|
||||
):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type=AgentHistoryType.SCRIPT_RUN,
|
||||
script=alert_template.resolved_action,
|
||||
username="alert-action-resolved",
|
||||
)
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.resolved_action.pk,
|
||||
args=alert.parse_script_args(alert_template.resolved_action_args),
|
||||
timeout=alert_template.resolved_action_timeout,
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
run_as_user=False,
|
||||
env_vars=alert_template.resolved_action_env_vars,
|
||||
)
|
||||
elif (
|
||||
alert_template.resolved_action_type == AlertTemplateActionType.SERVER
|
||||
and alert_template.resolved_action
|
||||
):
|
||||
stdout, stderr, execution_time, retcode = run_server_script(
|
||||
body=alert_template.resolved_action.script_body,
|
||||
args=alert.parse_script_args(alert_template.resolved_action_args),
|
||||
timeout=alert_template.resolved_action_timeout,
|
||||
env_vars=alert.parse_script_args(
|
||||
alert_template.resolved_action_env_vars
|
||||
),
|
||||
shell=alert_template.resolved_action.shell,
|
||||
)
|
||||
r = {
|
||||
"stdout": stdout,
|
||||
"stderr": stderr,
|
||||
"execution_time": execution_time,
|
||||
"retcode": retcode,
|
||||
}
|
||||
|
||||
elif alert_template.action_type == AlertTemplateActionType.REST:
|
||||
if (
|
||||
alert.severity == AlertSeverity.INFO
|
||||
and not core.notify_on_info_alerts
|
||||
or alert.severity == AlertSeverity.WARNING
|
||||
and not core.notify_on_warning_alerts
|
||||
):
|
||||
return
|
||||
else:
|
||||
output, status = run_url_rest_action(
|
||||
action_id=alert_template.resolved_action_rest.id, instance=alert
|
||||
)
|
||||
logger.debug(f"{output=} {status=}")
|
||||
|
||||
r = {
|
||||
"stdout": output,
|
||||
"stderr": "",
|
||||
"execution_time": 0,
|
||||
"retcode": status,
|
||||
}
|
||||
else:
|
||||
return
|
||||
|
||||
# command was successful
|
||||
if isinstance(r, dict):
|
||||
@@ -612,40 +792,36 @@ class Alert(models.Model):
|
||||
alert.resolved_action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.SCRIPTING,
|
||||
message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
|
||||
)
|
||||
if (
|
||||
alert_template.resolved_action_type
|
||||
== AlertTemplateActionType.SCRIPT
|
||||
):
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.SCRIPTING,
|
||||
message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
|
||||
)
|
||||
else:
|
||||
DebugLog.error(
|
||||
log_type=DebugLogType.SCRIPTING,
|
||||
message=f"Resolved action: {alert_template.action.name} failed to run on server for resolved alert",
|
||||
)
|
||||
|
||||
def parse_script_args(self, args: List[str]) -> List[str]:
if not args:
return []

temp_args = []
# pattern to match for injection
pattern = re.compile(".*\\{\\{alert\\.(.*)\\}\\}.*")

for arg in args:
if match := pattern.match(arg):
name = match.group(1)
temp_arg = arg
for string, model, prop in RE_DB_VALUE.findall(arg):
value = get_db_value(string=f"{model}.{prop}", instance=self)

# check if attr exists and isn't a function
if hasattr(self, name) and not callable(getattr(self, name)):
value = f"'{getattr(self, name)}'"
else:
continue
if value is not None:
temp_arg = temp_arg.replace(string, f"'{str(value)}'")

try:
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg))
except re.error:
temp_args.append(re.sub("\\{\\{.*\\}\\}", re.escape(value), arg))
except Exception as e:
DebugLog.error(log_type=DebugLogType.SCRIPTING, message=str(e))
continue

else:
temp_args.append(arg)
temp_args.append(temp_arg)

return temp_args
|
||||
@@ -654,6 +830,11 @@ class AlertTemplate(BaseAuditModel):
|
||||
name = models.CharField(max_length=100)
|
||||
is_active = models.BooleanField(default=True)
|
||||
|
||||
action_type = models.CharField(
|
||||
max_length=10,
|
||||
choices=AlertTemplateActionType.choices,
|
||||
default=AlertTemplateActionType.SCRIPT,
|
||||
)
|
||||
action = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
related_name="alert_template",
|
||||
@@ -661,6 +842,13 @@ class AlertTemplate(BaseAuditModel):
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
action_rest = models.ForeignKey(
|
||||
"core.URLAction",
|
||||
related_name="url_action_alert_template",
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
action_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
@@ -674,6 +862,11 @@ class AlertTemplate(BaseAuditModel):
|
||||
default=list,
|
||||
)
|
||||
action_timeout = models.PositiveIntegerField(default=15)
|
||||
resolved_action_type = models.CharField(
|
||||
max_length=10,
|
||||
choices=AlertTemplateActionType.choices,
|
||||
default=AlertTemplateActionType.SCRIPT,
|
||||
)
|
||||
resolved_action = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
related_name="resolved_alert_template",
|
||||
@@ -681,6 +874,13 @@ class AlertTemplate(BaseAuditModel):
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
resolved_action_rest = models.ForeignKey(
|
||||
"core.URLAction",
|
||||
related_name="resolved_url_action_alert_template",
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
resolved_action_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
@@ -719,7 +919,8 @@ class AlertTemplate(BaseAuditModel):
|
||||
agent_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
agent_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
agent_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
# fmt: off
|
||||
agent_script_actions = BooleanField(null=True, blank=True, default=True) # should be renamed because also deals with webhooks
|
||||
|
||||
# check alert settings
|
||||
check_email_alert_severity = ArrayField(
|
||||
@@ -743,7 +944,8 @@ class AlertTemplate(BaseAuditModel):
|
||||
check_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
check_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
check_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
# fmt: off
|
||||
check_script_actions = BooleanField(null=True, blank=True, default=True) # should be renamed because also deals with webhooks
|
||||
|
||||
# task alert settings
|
||||
task_email_alert_severity = ArrayField(
|
||||
@@ -767,7 +969,8 @@ class AlertTemplate(BaseAuditModel):
|
||||
task_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
task_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
task_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
# fmt: off
|
||||
task_script_actions = BooleanField(null=True, blank=True, default=True) # should be renamed because also deals with webhooks
|
||||
|
||||
# exclusion settings
|
||||
exclude_workstations = BooleanField(null=True, blank=True, default=False)
|
||||
|
||||
@@ -3,6 +3,7 @@ from typing import TYPE_CHECKING
from django.shortcuts import get_object_or_404
from rest_framework import permissions

from tacticalrmm.constants import AlertTemplateActionType
from tacticalrmm.permissions import _has_perm, _has_perm_on_agent

if TYPE_CHECKING:
@@ -53,4 +54,17 @@ class AlertTemplatePerms(permissions.BasePermission):
if r.method == "GET":
return _has_perm(r, "can_list_alerttemplates")

if r.method in ("POST", "PUT", "PATCH"):
# ensure only users with explicit run server script perms can add/modify alert templates
# while also still requiring the manage alert template perm
if isinstance(r.data, dict):
if (
r.data.get("action_type") == AlertTemplateActionType.SERVER
or r.data.get("resolved_action_type")
== AlertTemplateActionType.SERVER
):
return _has_perm(r, "can_run_server_scripts") and _has_perm(
r, "can_manage_alerttemplates"
)

return _has_perm(r, "can_manage_alerttemplates")
|
||||
@@ -3,6 +3,7 @@ from rest_framework.serializers import ModelSerializer, ReadOnlyField

from automation.serializers import PolicySerializer
from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer
from tacticalrmm.constants import AlertTemplateActionType

from .models import Alert, AlertTemplate

@@ -25,14 +26,29 @@ class AlertTemplateSerializer(ModelSerializer):
task_settings = ReadOnlyField(source="has_task_settings")
core_settings = ReadOnlyField(source="has_core_settings")
default_template = ReadOnlyField(source="is_default_template")
action_name = ReadOnlyField(source="action.name")
resolved_action_name = ReadOnlyField(source="resolved_action.name")
action_name = SerializerMethodField()
resolved_action_name = SerializerMethodField()
applied_count = SerializerMethodField()

class Meta:
model = AlertTemplate
fields = "__all__"

def get_action_name(self, obj):
if obj.action_type == AlertTemplateActionType.REST and obj.action_rest:
return obj.action_rest.name

return obj.action.name if obj.action else ""

def get_resolved_action_name(self, obj):
if (
obj.resolved_action_type == AlertTemplateActionType.REST
and obj.resolved_action_rest
):
return obj.resolved_action_rest.name

return obj.resolved_action.name if obj.resolved_action else ""

def get_applied_count(self, instance):
return (
instance.policies.count()
|
||||
@@ -2,15 +2,20 @@ from datetime import timedelta
|
||||
from itertools import cycle
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker, seq
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from autotasks.models import TaskResult
|
||||
from core.tasks import cache_db_fields_task, resolve_alerts_task
|
||||
from core.utils import get_core_settings
|
||||
from tacticalrmm.constants import AgentMonType, AlertSeverity, AlertType, CheckStatus
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker, seq
|
||||
from tacticalrmm.constants import (
|
||||
AgentMonType,
|
||||
AlertSeverity,
|
||||
AlertType,
|
||||
CheckStatus,
|
||||
URLActionType,
|
||||
)
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import Alert, AlertTemplate
|
||||
@@ -277,12 +282,32 @@ class TestAlertsViews(TacticalTestCase):
|
||||
resp = self.client.get("/alerts/templates/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert_template = baker.make("alerts.AlertTemplate")
|
||||
url = f"/alerts/templates/{alert_template.pk}/"
|
||||
agent_script = baker.make("scripts.Script")
|
||||
server_script = baker.make("scripts.Script")
|
||||
webhook = baker.make("core.URLAction", action_type=URLActionType.REST)
|
||||
|
||||
alert_template_agent_script = baker.make(
|
||||
"alerts.AlertTemplate", action=agent_script
|
||||
)
|
||||
url = f"/alerts/templates/{alert_template_agent_script.pk}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AlertTemplateSerializer(alert_template)
|
||||
serializer = AlertTemplateSerializer(alert_template_agent_script)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
|
||||
alert_template_server_script = baker.make(
|
||||
"alerts.AlertTemplate", action=server_script
|
||||
)
|
||||
url = f"/alerts/templates/{alert_template_server_script.pk}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AlertTemplateSerializer(alert_template_server_script)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
|
||||
alert_template_webhook = baker.make("alerts.AlertTemplate", action_rest=webhook)
|
||||
url = f"/alerts/templates/{alert_template_webhook.pk}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AlertTemplateSerializer(alert_template_webhook)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
|
||||
@@ -1429,6 +1454,8 @@ class TestAlertTasks(TacticalTestCase):
|
||||
"run_as_user": False,
|
||||
"env_vars": ["hello=world", "foo=bar"],
|
||||
"id": AgentHistory.objects.last().pk, # type: ignore
|
||||
"nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
|
||||
"deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
|
||||
}
|
||||
|
||||
nats_cmd.assert_called_with(data, timeout=30, wait=True)
|
||||
@@ -1460,6 +1487,8 @@ class TestAlertTasks(TacticalTestCase):
|
||||
"run_as_user": False,
|
||||
"env_vars": ["resolved=action", "env=vars"],
|
||||
"id": AgentHistory.objects.last().pk, # type: ignore
|
||||
"nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
|
||||
"deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
|
||||
}
|
||||
|
||||
nats_cmd.assert_called_with(data, timeout=35, wait=True)
|
||||
|
||||
@@ -26,12 +26,12 @@ class GetAddAlerts(APIView):
# top 10 alerts for dashboard icon
if "top" in request.data.keys():
alerts = (
Alert.objects.filter_by_role(request.user)
Alert.objects.filter_by_role(request.user) # type: ignore
.filter(resolved=False, snoozed=False, hidden=False)
.order_by("alert_time")[: int(request.data["top"])]
)
count = (
Alert.objects.filter_by_role(request.user)
Alert.objects.filter_by_role(request.user) # type: ignore
.filter(resolved=False, snoozed=False, hidden=False)
.count()
)
|
||||
@@ -22,4 +22,12 @@ def get_agent_config() -> AgentCheckInConfig:
|
||||
*getattr(settings, "CHECKIN_SYNCMESH", (800, 1200))
|
||||
),
|
||||
limit_data=getattr(settings, "LIMIT_DATA", False),
|
||||
install_nushell=getattr(settings, "INSTALL_NUSHELL", False),
|
||||
install_nushell_version=getattr(settings, "INSTALL_NUSHELL_VERSION", ""),
|
||||
install_nushell_url=getattr(settings, "INSTALL_NUSHELL_URL", ""),
|
||||
nushell_enable_config=getattr(settings, "NUSHELL_ENABLE_CONFIG", False),
|
||||
install_deno=getattr(settings, "INSTALL_DENO", False),
|
||||
install_deno_version=getattr(settings, "INSTALL_DENO_VERSION", ""),
|
||||
install_deno_url=getattr(settings, "INSTALL_DENO_URL", ""),
|
||||
deno_default_permissions=getattr(settings, "DENO_DEFAULT_PERMISSIONS", ""),
|
||||
)
|
||||
|
||||
@@ -12,14 +12,16 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.models import User
|
||||
from agents.models import Agent, AgentHistory
|
||||
from agents.models import Agent, AgentHistory, Note
|
||||
from agents.serializers import AgentHistorySerializer
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from apiv3.utils import get_agent_config
|
||||
from autotasks.models import AutomatedTask, TaskResult
|
||||
from autotasks.serializers import TaskGOGetSerializer, TaskResultSerializer
|
||||
from checks.constants import CHECK_DEFER, CHECK_RESULT_DEFER
|
||||
from checks.models import Check, CheckResult
|
||||
from checks.serializers import CheckRunnerGetSerializer
|
||||
from core.tasks import sync_mesh_perms_task
|
||||
from core.utils import (
|
||||
download_mesh_agent,
|
||||
get_core_settings,
|
||||
@@ -31,11 +33,14 @@ from logs.models import DebugLog, PendingAction
|
||||
from software.models import InstalledSoftware
|
||||
from tacticalrmm.constants import (
|
||||
AGENT_DEFER,
|
||||
TRMM_MAX_REQUEST_SIZE,
|
||||
AgentHistoryType,
|
||||
AgentMonType,
|
||||
AgentPlat,
|
||||
AuditActionType,
|
||||
AuditObjType,
|
||||
CheckStatus,
|
||||
CustomFieldModel,
|
||||
DebugLogType,
|
||||
GoArch,
|
||||
MeshAgentIdent,
|
||||
@@ -338,6 +343,12 @@ class TaskRunner(APIView):
|
||||
AutomatedTask.objects.select_related("custom_field"), pk=pk
|
||||
)
|
||||
|
||||
content_length = request.META.get("CONTENT_LENGTH")
|
||||
if content_length and int(content_length) > TRMM_MAX_REQUEST_SIZE:
|
||||
request.data["stdout"] = ""
|
||||
request.data["stderr"] = "Content truncated due to excessive request size."
|
||||
request.data["retcode"] = 1
|
||||
|
||||
# get task result or create if doesn't exist
|
||||
try:
|
||||
task_result = (
|
||||
@@ -356,7 +367,7 @@ class TaskRunner(APIView):
|
||||
|
||||
AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type=AuditActionType.TASK_RUN,
|
||||
type=AgentHistoryType.TASK_RUN,
|
||||
command=task.name,
|
||||
script_results=request.data,
|
||||
)
|
||||
@@ -426,8 +437,8 @@ class MeshExe(APIView):
|
||||
|
||||
try:
|
||||
return download_mesh_agent(dl_url)
|
||||
except:
|
||||
return notify_error("Unable to download mesh agent exe")
|
||||
except Exception as e:
|
||||
return notify_error(f"Unable to download mesh agent: {e}")
|
||||
|
||||
|
||||
class NewAgent(APIView):
|
||||
@@ -481,6 +492,8 @@ class NewAgent(APIView):
|
||||
)
|
||||
|
||||
ret = {"pk": agent.pk, "token": token.key}
|
||||
sync_mesh_perms_task.delay()
|
||||
cache_agents_alert_template.delay()
|
||||
return Response(ret)
|
||||
|
||||
|
||||
@@ -559,12 +572,49 @@ class AgentHistoryResult(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def patch(self, request, agentid, pk):
|
||||
content_length = request.META.get("CONTENT_LENGTH")
|
||||
if content_length and int(content_length) > TRMM_MAX_REQUEST_SIZE:
|
||||
|
||||
request.data["script_results"]["stdout"] = ""
|
||||
request.data["script_results"][
|
||||
"stderr"
|
||||
] = "Content truncated due to excessive request size."
|
||||
request.data["script_results"]["retcode"] = 1
|
||||
|
||||
hist = get_object_or_404(
|
||||
AgentHistory.objects.filter(agent__agent_id=agentid), pk=pk
|
||||
AgentHistory.objects.select_related("custom_field").filter(
|
||||
agent__agent_id=agentid
|
||||
),
|
||||
pk=pk,
|
||||
)
|
||||
s = AgentHistorySerializer(instance=hist, data=request.data, partial=True)
|
||||
s.is_valid(raise_exception=True)
|
||||
s.save()
|
||||
|
||||
if hist.custom_field:
|
||||
if hist.custom_field.model == CustomFieldModel.AGENT:
|
||||
field = hist.custom_field.get_or_create_field_value(hist.agent)
|
||||
elif hist.custom_field.model == CustomFieldModel.CLIENT:
|
||||
field = hist.custom_field.get_or_create_field_value(hist.agent.client)
|
||||
elif hist.custom_field.model == CustomFieldModel.SITE:
|
||||
field = hist.custom_field.get_or_create_field_value(hist.agent.site)
|
||||
|
||||
r = request.data["script_results"]["stdout"]
|
||||
value = (
|
||||
r.strip()
|
||||
if hist.collector_all_output
|
||||
else r.strip().split("\n")[-1].strip()
|
||||
)
|
||||
|
||||
field.save_to_field(value)
|
||||
|
||||
if hist.save_to_agent_note:
|
||||
Note.objects.create(
|
||||
agent=hist.agent,
|
||||
user=request.user,
|
||||
note=request.data["script_results"]["stdout"],
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
|
||||
@@ -47,7 +47,7 @@ class Policy(BaseAuditModel):
old_policy: Optional[Policy] = (
type(self).objects.get(pk=self.pk) if self.pk else None
)
super(Policy, self).save(old_model=old_policy, *args, **kwargs)
super().save(old_model=old_policy, *args, **kwargs)

# check if alert template was changed and cache on agents
if old_policy:
@@ -68,10 +68,7 @@
cache.delete_many_pattern("site_server_*")
cache.delete_many_pattern("agent_*")

super(Policy, self).delete(
*args,
**kwargs,
)
super().delete(*args, **kwargs)

def __str__(self) -> str:
return self.name
|
||||
@@ -126,7 +126,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
cache_alert_template.called_once()
|
||||
cache_alert_template.assert_called_once()
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
|
||||
@@ -7,10 +7,4 @@ class Command(BaseCommand):
|
||||
help = "Checks for orphaned tasks on all agents and removes them"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
remove_orphaned_win_tasks.s()
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
"The task has been initiated. Check the Debug Log in the UI for progress."
|
||||
)
|
||||
)
|
||||
remove_orphaned_win_tasks()
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.7 on 2023-11-23 04:39
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0038_add_missing_env_vars'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='task_type',
|
||||
field=models.CharField(choices=[('daily', 'Daily'), ('weekly', 'Weekly'), ('monthly', 'Monthly'), ('monthlydow', 'Monthly Day of Week'), ('checkfailure', 'On Check Failure'), ('manual', 'Manual'), ('runonce', 'Run Once'), ('onboarding', 'Onboarding'), ('scheduled', 'Scheduled')], default='manual', max_length=100),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.10 on 2024-02-19 05:57
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("autotasks", "0039_alter_automatedtask_task_type"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="taskresult",
|
||||
name="id",
|
||||
field=models.BigAutoField(primary_key=True, serialize=False),
|
||||
),
|
||||
]
|
||||
@@ -1,9 +1,9 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import random
|
||||
import string
|
||||
from contextlib import suppress
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
@@ -14,12 +14,11 @@ from django.db.utils import DatabaseError
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from core.utils import get_core_settings
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
from logs.models import BaseAuditModel
|
||||
from tacticalrmm.constants import (
|
||||
FIELDS_TRIGGER_TASK_UPDATE_AGENT,
|
||||
POLICY_TASK_FIELDS_TO_COPY,
|
||||
AlertSeverity,
|
||||
DebugLogType,
|
||||
TaskStatus,
|
||||
TaskSyncStatus,
|
||||
TaskType,
|
||||
@@ -31,6 +30,7 @@ if TYPE_CHECKING:
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
|
||||
from tacticalrmm.helpers import has_script_actions, has_webhook
|
||||
from tacticalrmm.models import PermissionQuerySet
|
||||
from tacticalrmm.utils import (
|
||||
bitdays_to_string,
|
||||
@@ -46,6 +46,9 @@ def generate_task_name() -> str:
|
||||
return "TacticalRMM_" + "".join(random.choice(chars) for i in range(35))
|
||||
|
||||
|
||||
logger = logging.getLogger("trmm")
|
||||
|
||||
|
||||
class AutomatedTask(BaseAuditModel):
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
@@ -149,7 +152,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
|
||||
# get old task if exists
|
||||
old_task = AutomatedTask.objects.get(pk=self.pk) if self.pk else None
|
||||
super(AutomatedTask, self).save(old_model=old_task, *args, **kwargs)
|
||||
super().save(old_model=old_task, *args, **kwargs)
|
||||
|
||||
# check if fields were updated that require a sync to the agent and set status to notsynced
|
||||
if old_task:
|
||||
@@ -172,10 +175,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
cache.delete_many_pattern("site_*_tasks")
|
||||
cache.delete_many_pattern("agent_*_tasks")
|
||||
|
||||
super(AutomatedTask, self).delete(
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
super().delete(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def schedule(self) -> Optional[str]:
|
||||
@@ -209,6 +209,9 @@ class AutomatedTask(BaseAuditModel):
|
||||
weeks = bitweeks_to_string(self.monthly_weeks_of_month)
|
||||
days = bitdays_to_string(self.run_time_bit_weekdays)
|
||||
return f"Runs on {months} on {weeks} on {days} at {run_time_nice}"
|
||||
elif self.task_type == TaskType.ONBOARDING:
|
||||
return "Onboarding: Runs once on task creation."
|
||||
return None
|
||||
|
||||
@property
|
||||
def fields_that_trigger_task_update_on_agent(self) -> List[str]:
|
||||
@@ -236,64 +239,56 @@ class AutomatedTask(BaseAuditModel):
|
||||
task.save()
|
||||
|
||||
# agent version >= 1.8.0
|
||||
def generate_nats_task_payload(
|
||||
self, agent: "Optional[Agent]" = None, editing: bool = False
|
||||
) -> Dict[str, Any]:
|
||||
def generate_nats_task_payload(self) -> Dict[str, Any]:
|
||||
task = {
|
||||
"pk": self.pk,
|
||||
"type": "rmm",
|
||||
"name": self.win_task_name,
|
||||
"overwrite_task": editing,
|
||||
"overwrite_task": True,
|
||||
"enabled": self.enabled,
|
||||
"trigger": self.task_type
|
||||
if self.task_type != TaskType.CHECK_FAILURE
|
||||
else TaskType.MANUAL,
|
||||
"trigger": (
|
||||
self.task_type
|
||||
if self.task_type != TaskType.CHECK_FAILURE
|
||||
else TaskType.MANUAL
|
||||
),
|
||||
"multiple_instances": self.task_instance_policy or 0,
|
||||
"delete_expired_task_after": self.remove_if_not_scheduled
|
||||
if self.expire_date
|
||||
else False,
|
||||
"start_when_available": self.run_asap_after_missed
|
||||
if self.task_type != TaskType.RUN_ONCE
|
||||
else True,
|
||||
"delete_expired_task_after": (
|
||||
self.remove_if_not_scheduled if self.expire_date else False
|
||||
),
|
||||
"start_when_available": (
|
||||
self.run_asap_after_missed
|
||||
if self.task_type != TaskType.RUN_ONCE
|
||||
else True
|
||||
),
|
||||
}
|
||||
|
||||
if self.task_type in (
|
||||
TaskType.RUN_ONCE,
|
||||
TaskType.DAILY,
|
||||
TaskType.WEEKLY,
|
||||
TaskType.MONTHLY,
|
||||
TaskType.MONTHLY_DOW,
|
||||
TaskType.RUN_ONCE,
|
||||
):
|
||||
# set runonce task in future if creating and run_asap_after_missed is set
|
||||
if (
|
||||
not editing
|
||||
and self.task_type == TaskType.RUN_ONCE
|
||||
and self.run_asap_after_missed
|
||||
and agent
|
||||
and self.run_time_date.replace(tzinfo=ZoneInfo(agent.timezone))
|
||||
< djangotime.now().astimezone(ZoneInfo(agent.timezone))
|
||||
):
|
||||
self.run_time_date = (
|
||||
djangotime.now() + djangotime.timedelta(minutes=5)
|
||||
).astimezone(ZoneInfo(agent.timezone))
|
||||
if not self.run_time_date:
|
||||
self.run_time_date = djangotime.now()
|
||||
|
||||
task["start_year"] = int(self.run_time_date.strftime("%Y"))
|
||||
task["start_month"] = int(self.run_time_date.strftime("%-m"))
|
||||
task["start_day"] = int(self.run_time_date.strftime("%-d"))
|
||||
task["start_hour"] = int(self.run_time_date.strftime("%-H"))
|
||||
task["start_min"] = int(self.run_time_date.strftime("%-M"))
|
||||
task["start_year"] = self.run_time_date.year
|
||||
task["start_month"] = self.run_time_date.month
|
||||
task["start_day"] = self.run_time_date.day
|
||||
task["start_hour"] = self.run_time_date.hour
|
||||
task["start_min"] = self.run_time_date.minute
|
||||
|
||||
if self.expire_date:
|
||||
task["expire_year"] = int(self.expire_date.strftime("%Y"))
|
||||
task["expire_month"] = int(self.expire_date.strftime("%-m"))
|
||||
task["expire_day"] = int(self.expire_date.strftime("%-d"))
|
||||
task["expire_hour"] = int(self.expire_date.strftime("%-H"))
|
||||
task["expire_min"] = int(self.expire_date.strftime("%-M"))
|
||||
task["expire_year"] = self.expire_date.year
|
||||
task["expire_month"] = self.expire_date.month
|
||||
task["expire_day"] = self.expire_date.day
|
||||
task["expire_hour"] = self.expire_date.hour
|
||||
task["expire_min"] = self.expire_date.minute
|
||||
|
||||
if self.random_task_delay:
|
||||
task["random_delay"] = convert_to_iso_duration(self.random_task_delay)
|
||||
|
||||
if self.task_repetition_interval:
|
||||
if self.task_repetition_interval and self.task_repetition_duration:
|
||||
task["repetition_interval"] = convert_to_iso_duration(
|
||||
self.task_repetition_interval
|
||||
)
|
||||
@@ -341,27 +336,24 @@ class AutomatedTask(BaseAuditModel):
|
||||
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": self.generate_nats_task_payload(agent),
|
||||
"schedtaskpayload": self.generate_nats_task_payload(),
|
||||
}
|
||||
logger.debug(nats_data)
|
||||
|
||||
r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=5))
|
||||
r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=10))
|
||||
|
||||
if r != "ok":
|
||||
task_result.sync_status = TaskSyncStatus.INITIAL
|
||||
task_result.save(update_fields=["sync_status"])
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"Unable to create scheduled task {self.name} on {task_result.agent.hostname}. It will be created when the agent checks in.",
|
||||
logger.error(
|
||||
f"Unable to create scheduled task {self.name} on {task_result.agent.hostname}: {r}"
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
task_result.sync_status = TaskSyncStatus.SYNCED
|
||||
task_result.save(update_fields=["sync_status"])
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"{task_result.agent.hostname} task {self.name} was successfully created",
|
||||
logger.info(
|
||||
f"{task_result.agent.hostname} task {self.name} was successfully created."
|
||||
)
|
||||
|
||||
return "ok"
|
||||
@@ -380,27 +372,24 @@ class AutomatedTask(BaseAuditModel):
|
||||
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": self.generate_nats_task_payload(editing=True),
|
||||
"schedtaskpayload": self.generate_nats_task_payload(),
|
||||
}
|
||||
logger.debug(nats_data)
|
||||
|
||||
r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=5))
|
||||
r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=10))
|
||||
|
||||
if r != "ok":
|
||||
task_result.sync_status = TaskSyncStatus.NOT_SYNCED
|
||||
task_result.save(update_fields=["sync_status"])
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"Unable to modify scheduled task {self.name} on {task_result.agent.hostname}({task_result.agent.agent_id}). It will try again on next agent checkin",
|
||||
logger.error(
|
||||
f"Unable to modify scheduled task {self.name} on {task_result.agent.hostname}: {r}"
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
task_result.sync_status = TaskSyncStatus.SYNCED
|
||||
task_result.save(update_fields=["sync_status"])
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"{task_result.agent.hostname} task {self.name} was successfully modified",
|
||||
logger.info(
|
||||
f"{task_result.agent.hostname} task {self.name} was successfully modified."
|
||||
)
|
||||
|
||||
return "ok"
|
||||
@@ -429,20 +418,13 @@ class AutomatedTask(BaseAuditModel):
|
||||
with suppress(DatabaseError):
|
||||
task_result.save(update_fields=["sync_status"])
|
||||
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"{task_result.agent.hostname} task {self.name} will be deleted on next checkin",
|
||||
logger.error(
|
||||
f"Unable to delete task {self.name} on {task_result.agent.hostname}: {r}"
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.delete()
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"{task_result.agent.hostname}({task_result.agent.agent_id}) task {self.name} was deleted",
|
||||
)
|
||||
|
||||
logger.info(f"{task_result.agent.hostname} task {self.name} was deleted.")
|
||||
return "ok"
|
||||
|
||||
def run_win_task(self, agent: "Optional[Agent]" = None) -> str:
|
||||
@@ -465,18 +447,19 @@ class AutomatedTask(BaseAuditModel):
|
||||
return "ok"
|
||||
|
||||
def should_create_alert(self, alert_template=None):
|
||||
has_autotask_notification = (
|
||||
self.dashboard_alert or self.email_alert or self.text_alert
|
||||
)
|
||||
has_alert_template_notification = alert_template and (
|
||||
alert_template.task_always_alert
|
||||
or alert_template.task_always_email
|
||||
or alert_template.task_always_text
|
||||
)
|
||||
return (
|
||||
self.dashboard_alert
|
||||
or self.email_alert
|
||||
or self.text_alert
|
||||
or (
|
||||
alert_template
|
||||
and (
|
||||
alert_template.task_always_alert
|
||||
or alert_template.task_always_email
|
||||
or alert_template.task_always_text
|
||||
)
|
||||
)
|
||||
has_autotask_notification
|
||||
or has_alert_template_notification
|
||||
or has_webhook(alert_template, "task")
|
||||
or has_script_actions(alert_template, "task")
|
||||
)
|
||||
|
||||
|
||||
@@ -486,6 +469,7 @@ class TaskResult(models.Model):
|
||||
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
agent = models.ForeignKey(
|
||||
"agents.Agent",
|
||||
related_name="taskresults",
|
||||
|
||||
@@ -2,6 +2,7 @@ from datetime import datetime
|
||||
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework import serializers
|
||||
from django.conf import settings
|
||||
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.constants import TaskType
|
||||
@@ -257,6 +258,8 @@ class TaskGOGetSerializer(serializers.ModelSerializer):
|
||||
shell=script.shell,
|
||||
env_vars=env_vars,
|
||||
),
|
||||
"nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
|
||||
"deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
|
||||
}
|
||||
)
|
||||
if actions_to_remove:
|
||||
|
||||
@@ -417,7 +417,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "daily",
|
||||
"multiple_instances": 1,
|
||||
@@ -431,7 +431,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"day_interval": 1,
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
nats_cmd.reset_mock()
|
||||
self.assertEqual(
|
||||
@@ -470,7 +470,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "weekly",
|
||||
"multiple_instances": 2,
|
||||
@@ -490,7 +490,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"days_of_week": 127,
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
@@ -518,7 +518,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "monthly",
|
||||
"multiple_instances": 1,
|
||||
@@ -538,7 +538,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"months_of_year": 1024,
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
@@ -562,7 +562,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "monthlydow",
|
||||
"multiple_instances": 1,
|
||||
@@ -578,7 +578,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"weeks_of_month": 3,
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
@@ -600,7 +600,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "runonce",
|
||||
"multiple_instances": 1,
|
||||
@@ -613,39 +613,10 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"start_min": int(task1.run_time_date.strftime("%-M")),
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
# test runonce with date in the past
|
||||
task1 = baker.make(
|
||||
"autotasks.AutomatedTask",
|
||||
agent=agent,
|
||||
name="test task 3",
|
||||
task_type=TaskType.RUN_ONCE,
|
||||
run_asap_after_missed=True,
|
||||
run_time_date=djangotime.datetime(2018, 6, 1, 23, 23, 23),
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
create_win_task_schedule(pk=task1.pk)
|
||||
nats_cmd.assert_called()
|
||||
|
||||
# check if task is scheduled for at most 5min in the future
|
||||
_, args, _ = nats_cmd.mock_calls[0]
|
||||
|
||||
current_minute = int(djangotime.now().strftime("%-M"))
|
||||
|
||||
if current_minute >= 55 and current_minute < 60:
|
||||
self.assertLess(
|
||||
args[0]["schedtaskpayload"]["start_min"],
|
||||
int(djangotime.now().strftime("%-M")),
|
||||
)
|
||||
else:
|
||||
self.assertGreater(
|
||||
args[0]["schedtaskpayload"]["start_min"],
|
||||
int(djangotime.now().strftime("%-M")),
|
||||
)
|
||||
|
||||
# test checkfailure task
|
||||
nats_cmd.reset_mock()
|
||||
check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
@@ -665,7 +636,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "manual",
|
||||
"multiple_instances": 1,
|
||||
@@ -673,7 +644,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"start_when_available": False,
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
@@ -692,7 +663,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "manual",
|
||||
"multiple_instances": 1,
|
||||
@@ -700,7 +671,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"start_when_available": False,
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from django.shortcuts import get_object_or_404
|
||||
from packaging import version as pyver
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
@@ -6,6 +7,8 @@ from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from automation.models import Policy
|
||||
from tacticalrmm.constants import TaskType
|
||||
from tacticalrmm.helpers import notify_error
|
||||
from tacticalrmm.permissions import _has_perm_on_agent
|
||||
|
||||
from .models import AutomatedTask
|
||||
@@ -40,6 +43,11 @@ class GetAddAutoTasks(APIView):
|
||||
if not _has_perm_on_agent(request.user, agent.agent_id):
|
||||
raise PermissionDenied()
|
||||
|
||||
if data["task_type"] == TaskType.ONBOARDING and pyver.parse(
|
||||
agent.version
|
||||
) < pyver.parse("2.6.0"):
|
||||
return notify_error("Onboarding tasks require agent >= 2.6.0")
|
||||
|
||||
data["agent"] = agent.pk
|
||||
|
||||
serializer = TaskSerializer(data=data)
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.10 on 2024-02-19 05:57
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("checks", "0031_check_env_vars"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="checkhistory",
|
||||
name="id",
|
||||
field=models.BigAutoField(primary_key=True, serialize=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="checkresult",
|
||||
name="id",
|
||||
field=models.BigAutoField(primary_key=True, serialize=False),
|
||||
),
|
||||
]
|
||||
@@ -19,6 +19,7 @@ from tacticalrmm.constants import (
|
||||
EvtLogNames,
|
||||
EvtLogTypes,
|
||||
)
|
||||
from tacticalrmm.helpers import has_script_actions, has_webhook
|
||||
from tacticalrmm.models import PermissionQuerySet
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -168,10 +169,7 @@ class Check(BaseAuditModel):
|
||||
elif self.agent:
|
||||
cache.delete(f"agent_{self.agent.agent_id}_checks")
|
||||
|
||||
super(Check, self).save(
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
# if check is a policy check clear cache on everything
|
||||
@@ -183,10 +181,7 @@ class Check(BaseAuditModel):
|
||||
elif self.agent:
|
||||
cache.delete(f"agent_{self.agent.agent_id}_checks")
|
||||
|
||||
super(Check, self).delete(
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
super().delete(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def readable_desc(self):
|
||||
@@ -236,18 +231,19 @@ class Check(BaseAuditModel):
|
||||
check.save()
|
||||
|
||||
def should_create_alert(self, alert_template=None):
|
||||
has_check_notifications = (
|
||||
self.dashboard_alert or self.email_alert or self.text_alert
|
||||
)
|
||||
has_alert_template_notification = alert_template and (
|
||||
alert_template.check_always_alert
|
||||
or alert_template.check_always_email
|
||||
or alert_template.check_always_text
|
||||
)
|
||||
return (
|
||||
self.dashboard_alert
|
||||
or self.email_alert
|
||||
or self.text_alert
|
||||
or (
|
||||
alert_template
|
||||
and (
|
||||
alert_template.check_always_alert
|
||||
or alert_template.check_always_email
|
||||
or alert_template.check_always_text
|
||||
)
|
||||
)
|
||||
has_check_notifications
|
||||
or has_alert_template_notification
|
||||
or has_webhook(alert_template, "check")
|
||||
or has_script_actions(alert_template, "check")
|
||||
)
|
||||
|
||||
def add_check_history(
|
||||
@@ -290,6 +286,7 @@ class CheckResult(models.Model):
|
||||
class Meta:
|
||||
unique_together = (("agent", "assigned_check"),)
|
||||
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
agent = models.ForeignKey(
|
||||
"agents.Agent",
|
||||
related_name="checkresults",
|
||||
@@ -338,10 +335,7 @@ class CheckResult(models.Model):
|
||||
):
|
||||
self.alert_severity = AlertSeverity.WARNING
|
||||
|
||||
super(CheckResult, self).save(
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def history_info(self):
|
||||
@@ -371,9 +365,11 @@ class CheckResult(models.Model):
|
||||
if len(self.history) > 15:
|
||||
self.history = self.history[-15:]
|
||||
|
||||
update_fields.extend(["history"])
|
||||
update_fields.extend(["history", "more_info"])
|
||||
|
||||
avg = int(mean(self.history))
|
||||
txt = "Memory Usage" if check.check_type == CheckType.MEMORY else "CPU Load"
|
||||
self.more_info = f"Average {txt}: {avg}%"
|
||||
|
||||
if check.error_threshold and avg > check.error_threshold:
|
||||
self.status = CheckStatus.FAILING
|
||||
@@ -673,6 +669,7 @@ class CheckResult(models.Model):
|
||||
class CheckHistory(models.Model):
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
check_id = models.PositiveIntegerField(default=0)
|
||||
agent_id = models.CharField(max_length=200, null=True, blank=True)
|
||||
x = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
@@ -177,8 +177,7 @@ class CheckRunnerGetSerializer(serializers.ModelSerializer):
|
||||
return Script.parse_script_env_vars(
|
||||
agent=agent,
|
||||
shell=obj.script.shell,
|
||||
env_vars=obj.env_vars
|
||||
or obj.script.env_vars, # check's env_vars override the script's env vars
|
||||
env_vars=obj.env_vars,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
|
||||
@@ -8,6 +8,7 @@ from alerts.models import Alert
|
||||
from checks.models import CheckResult
|
||||
from tacticalrmm.celery import app
|
||||
from tacticalrmm.helpers import rand_range
|
||||
from tacticalrmm.logger import logger
|
||||
|
||||
|
||||
@app.task
|
||||
@@ -120,9 +121,9 @@ def handle_resolved_check_email_alert_task(pk: int) -> str:
|
||||
def prune_check_history(older_than_days: int) -> str:
|
||||
from .models import CheckHistory
|
||||
|
||||
CheckHistory.objects.filter(
|
||||
x__lt=djangotime.make_aware(dt.datetime.today())
|
||||
- djangotime.timedelta(days=older_than_days)
|
||||
c, _ = CheckHistory.objects.filter(
|
||||
x__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
|
||||
).delete()
|
||||
logger.info(f"Pruned {c} check history objects")
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -49,11 +49,7 @@ class Client(BaseAuditModel):

# get old client if exists
old_client = Client.objects.get(pk=self.pk) if self.pk else None
super(Client, self).save(
old_model=old_client,
*args,
**kwargs,
)
super().save(old_model=old_client, *args, **kwargs)

# check if policies have changed and initiate task to reapply policies if so
if old_client and (
@@ -129,11 +125,7 @@ class Site(BaseAuditModel):

# get old site if exists
old_site = Site.objects.get(pk=self.pk) if self.pk else None
super(Site, self).save(
old_model=old_site,
*args,
**kwargs,
)
super().save(old_model=old_site, *args, **kwargs)

# check if policies have changed and initiate task to reapply policies if so
if old_site:
@@ -141,6 +133,7 @@ class Site(BaseAuditModel):
old_site.alert_template != self.alert_template
or old_site.workstation_policy != self.workstation_policy
or old_site.server_policy != self.server_policy
or old_site.client != self.client
):
cache_agents_alert_template.delay()
|
||||
|
||||
@@ -88,6 +88,7 @@ class TestClientViews(TacticalTestCase):
|
||||
"client": {"name": "Setup Client"},
|
||||
"site": {"name": "Setup Site"},
|
||||
"timezone": "America/Los_Angeles",
|
||||
"companyname": "TestCo Inc.",
|
||||
"initialsetup": True,
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
|
||||
@@ -92,7 +92,8 @@ class GetAddClients(APIView):
|
||||
if "initialsetup" in request.data.keys():
|
||||
core = get_core_settings()
|
||||
core.default_time_zone = request.data["timezone"]
|
||||
core.save(update_fields=["default_time_zone"])
|
||||
core.mesh_company_name = request.data["companyname"]
|
||||
core.save(update_fields=["default_time_zone", "mesh_company_name"])
|
||||
|
||||
# save custom fields
|
||||
if "custom_fields" in request.data.keys():
|
||||
|
||||
@@ -41,6 +41,7 @@ agentBin="${agentBinPath}/${binName}"
|
||||
agentConf='/etc/tacticalagent'
|
||||
agentSvcName='tacticalagent.service'
|
||||
agentSysD="/etc/systemd/system/${agentSvcName}"
|
||||
agentDir='/opt/tacticalagent'
|
||||
meshDir='/opt/tacticalmesh'
|
||||
meshSystemBin="${meshDir}/meshagent"
|
||||
meshSvcName='meshagent.service'
|
||||
@@ -65,16 +66,20 @@ RemoveOldAgent() {
|
||||
if [ -f "${agentSysD}" ]; then
|
||||
systemctl disable ${agentSvcName}
|
||||
systemctl stop ${agentSvcName}
|
||||
rm -f ${agentSysD}
|
||||
rm -f "${agentSysD}"
|
||||
systemctl daemon-reload
|
||||
fi
|
||||
|
||||
if [ -f "${agentConf}" ]; then
|
||||
rm -f ${agentConf}
|
||||
rm -f "${agentConf}"
|
||||
fi
|
||||
|
||||
if [ -f "${agentBin}" ]; then
|
||||
rm -f ${agentBin}
|
||||
rm -f "${agentBin}"
|
||||
fi
|
||||
|
||||
if [ -d "${agentDir}" ]; then
|
||||
rm -rf "${agentDir}"
|
||||
fi
|
||||
}
|
||||
|
||||
@@ -132,16 +137,18 @@ Uninstall() {
RemoveOldAgent
}

if [ $# -ne 0 ] && [ $1 == 'uninstall' ]; then
if [ $# -ne 0 ] && [[ $1 =~ ^(uninstall|-uninstall|--uninstall)$ ]]; then
Uninstall
# Remove the current script
rm "$0"
exit 0
fi

while [[ "$#" -gt 0 ]]; do
case $1 in
--debug) DEBUG=1 ;;
--insecure) INSECURE=1 ;;
--nomesh) NOMESH=1 ;;
-debug | --debug | debug) DEBUG=1 ;;
-insecure | --insecure | insecure) INSECURE=1 ;;
-nomesh | --nomesh | nomesh) NOMESH=1 ;;
*)
echo "ERROR: Unknown parameter: $1"
exit 1
|
||||
@@ -1,15 +1,38 @@
|
||||
import asyncio
|
||||
import fcntl
|
||||
import os
|
||||
import pty
|
||||
import select
|
||||
import signal
|
||||
import struct
|
||||
import subprocess
|
||||
import termios
|
||||
import threading
|
||||
import uuid
|
||||
from contextlib import suppress
|
||||
|
||||
from channels.db import database_sync_to_async
|
||||
from channels.generic.websocket import AsyncJsonWebsocketConsumer
|
||||
from channels.generic.websocket import AsyncJsonWebsocketConsumer, JsonWebsocketConsumer
|
||||
from django.contrib.auth.models import AnonymousUser
|
||||
from django.db.models import F
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CoreSettings
|
||||
from tacticalrmm.constants import AgentMonType
|
||||
from tacticalrmm.helpers import days_until_cert_expires
|
||||
from tacticalrmm.logger import logger
|
||||
|
||||
|
||||
def _has_perm(user, perm: str) -> bool:
|
||||
if user.is_superuser or (user.role and getattr(user.role, "is_superuser")):
|
||||
return True
|
||||
|
||||
# make sure non-superusers with empty roles aren't permitted
|
||||
elif not user.role:
|
||||
return False
|
||||
|
||||
return user.role and getattr(user.role, perm)
|
||||
|
||||
|
||||
class DashInfo(AsyncJsonWebsocketConsumer):
|
||||
@@ -18,6 +41,11 @@ class DashInfo(AsyncJsonWebsocketConsumer):
|
||||
|
||||
if isinstance(self.user, AnonymousUser):
|
||||
await self.close()
|
||||
return
|
||||
|
||||
if self.user.block_dashboard_login:
|
||||
await self.close()
|
||||
return
|
||||
|
||||
await self.accept()
|
||||
self.connected = True
|
||||
@@ -62,13 +90,15 @@ class DashInfo(AsyncJsonWebsocketConsumer):
|
||||
)
|
||||
.count()
|
||||
)
|
||||
|
||||
return {
|
||||
"total_server_offline_count": offline_server_agents_count,
|
||||
"total_workstation_offline_count": offline_workstation_agents_count,
|
||||
"total_server_count": total_server_agents_count,
|
||||
"total_workstation_count": total_workstation_agents_count,
|
||||
"days_until_cert_expires": days_until_cert_expires(),
|
||||
"action": "dashboard.agentcount",
|
||||
"data": {
|
||||
"total_server_offline_count": offline_server_agents_count,
|
||||
"total_workstation_offline_count": offline_workstation_agents_count,
|
||||
"total_server_count": total_server_agents_count,
|
||||
"total_workstation_count": total_workstation_agents_count,
|
||||
"days_until_cert_expires": days_until_cert_expires(),
|
||||
},
|
||||
}
|
||||
|
||||
async def send_dash_info(self):
|
||||
@@ -76,3 +106,137 @@ class DashInfo(AsyncJsonWebsocketConsumer):
|
||||
c = await self.get_dashboard_info()
|
||||
await self.send_json(c)
|
||||
await asyncio.sleep(30)
|
||||
|
||||
|
||||
class TerminalConsumer(JsonWebsocketConsumer):
|
||||
child_pid = None
|
||||
fd = None
|
||||
shell = None
|
||||
command = ["/bin/bash"]
|
||||
user = None
|
||||
subprocess = None
|
||||
authorized = False
|
||||
connected = False
|
||||
|
||||
def run_command(self):
|
||||
master_fd, slave_fd = pty.openpty()
|
||||
|
||||
self.fd = master_fd
|
||||
env = os.environ.copy()
|
||||
env["TERM"] = "xterm"
|
||||
|
||||
with subprocess.Popen( # pylint: disable=subprocess-popen-preexec-fn
|
||||
self.command,
|
||||
stdin=slave_fd,
|
||||
stdout=slave_fd,
|
||||
stderr=slave_fd,
|
||||
preexec_fn=os.setsid,
|
||||
env=env,
|
||||
cwd=os.getenv("HOME", os.getcwd()),
|
||||
) as proc:
|
||||
self.subprocess = proc
|
||||
self.child_pid = proc.pid
|
||||
proc.wait()
|
||||
|
||||
# Subprocess has finished, close the websocket
|
||||
# happens when process exits, either via user exiting using exit() or by error
|
||||
self.subprocess = None
|
||||
self.child_pid = None
|
||||
if self.connected:
|
||||
self.connected = False
|
||||
self.close(4030)
|
||||
|
||||
def connect(self):
|
||||
if "user" not in self.scope:
|
||||
self.close(4401)
|
||||
return
|
||||
|
||||
self.user = self.scope["user"]
|
||||
|
||||
if isinstance(self.user, AnonymousUser):
|
||||
self.close()
|
||||
return
|
||||
|
||||
if not self.user.is_authenticated:
|
||||
self.close(4401)
|
||||
return
|
||||
|
||||
core: CoreSettings = CoreSettings.objects.first() # type: ignore
|
||||
if not core.web_terminal_enabled:
|
||||
self.close(4401)
|
||||
return
|
||||
|
||||
if self.user.block_dashboard_login or not _has_perm(
|
||||
self.user, "can_use_webterm"
|
||||
):
|
||||
self.close(4401)
|
||||
return
|
||||
|
||||
if self.child_pid is not None:
|
||||
return
|
||||
|
||||
self.connected = True
|
||||
self.authorized = True
|
||||
self.accept()
|
||||
|
||||
# Daemonize the thread so it automatically dies when the main thread exits
|
||||
thread = threading.Thread(target=self.run_command, daemon=True)
|
||||
thread.start()
|
||||
|
||||
thread = threading.Thread(target=self.read_from_pty, daemon=True)
|
||||
thread.start()
|
||||
|
||||
def read_from_pty(self):
|
||||
while True:
|
||||
select.select([self.fd], [], [])
|
||||
output = os.read(self.fd, 1024)
|
||||
if not output:
|
||||
break
|
||||
message = output.decode(errors="ignore")
|
||||
self.send_json(
|
||||
{
|
||||
"action": "trmmcli.output",
|
||||
"data": {"output": message, "messageId": str(uuid.uuid4())},
|
||||
}
|
||||
)
|
||||
|
||||
def resize(self, row, col, xpix=0, ypix=0):
|
||||
winsize = struct.pack("HHHH", row, col, xpix, ypix)
|
||||
fcntl.ioctl(self.fd, termios.TIOCSWINSZ, winsize)
|
||||
|
||||
def write_to_pty(self, message):
|
||||
os.write(self.fd, message.encode())
|
||||
|
||||
def kill_pty(self):
|
||||
if self.subprocess is not None:
|
||||
try:
|
||||
os.killpg(os.getpgid(self.child_pid), signal.SIGKILL)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to kill process group: {str(e)}")
|
||||
finally:
|
||||
self.subprocess = None
|
||||
self.child_pid = None
|
||||
|
||||
def disconnect(self, code):
|
||||
self.connected = False
|
||||
self.kill_pty()
|
||||
|
||||
def receive_json(self, data):
|
||||
if not self.authorized:
|
||||
return
|
||||
|
||||
action = data.get("action", None)
|
||||
|
||||
if not action:
|
||||
return
|
||||
|
||||
if action == "trmmcli.resize":
|
||||
self.resize(data["data"]["rows"], data["data"]["cols"])
|
||||
elif action == "trmmcli.input":
|
||||
message = data["data"]["input"]
|
||||
self.write_to_pty(message)
|
||||
elif action == "trmmcli.disconnect":
|
||||
self.kill_pty()
|
||||
self.send_json(
|
||||
{"action": "trmmcli.output", "data": {"output": "Terminal killed!"}}
|
||||
)
|
||||
|
||||
@@ -27,7 +27,7 @@ class Command(BaseCommand):
self._warning("Mesh device group:", core.mesh_device_group)

try:
token = get_auth_token(core.mesh_username, core.mesh_token)
token = get_auth_token(core.mesh_api_superuser, core.mesh_token)
except Exception as e:
self._error("Error getting auth token:")
self._error(str(e))

@@ -5,6 +5,7 @@ from tacticalrmm.constants import (
AGENT_OUTAGES_LOCK,
ORPHANED_WIN_TASK_LOCK,
RESOLVE_ALERTS_LOCK,
SYNC_MESH_PERMS_TASK_LOCK,
SYNC_SCHED_TASK_LOCK,
)

@@ -18,5 +19,6 @@ class Command(BaseCommand):
ORPHANED_WIN_TASK_LOCK,
RESOLVE_ALERTS_LOCK,
SYNC_SCHED_TASK_LOCK,
SYNC_MESH_PERMS_TASK_LOCK,
):
cache.delete(key)
|
||||
@@ -4,7 +4,7 @@ import os
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from tacticalrmm.helpers import get_nats_internal_protocol, get_nats_ports
|
||||
from tacticalrmm.helpers import get_nats_url
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
@@ -20,11 +20,9 @@ class Command(BaseCommand):
|
||||
else:
|
||||
ssl = "disable"
|
||||
|
||||
nats_std_port, _ = get_nats_ports()
|
||||
proto = get_nats_internal_protocol()
|
||||
config = {
|
||||
"key": settings.SECRET_KEY,
|
||||
"natsurl": f"{proto}://{settings.ALLOWED_HOSTS[0]}:{nats_std_port}",
|
||||
"natsurl": get_nats_url(),
|
||||
"user": db["USER"],
|
||||
"pass": db["PASSWORD"],
|
||||
"host": db["HOST"],
|
||||
|
||||
@@ -24,8 +24,8 @@ class Command(BaseCommand):
|
||||
try:
|
||||
ram = math.ceil(psutil.virtual_memory().total / (1024**3))
|
||||
if ram <= 2:
|
||||
max_requests = 30
|
||||
max_workers = 10
|
||||
max_requests = 15
|
||||
max_workers = 6
|
||||
elif ram <= 4:
|
||||
max_requests = 75
|
||||
max_workers = 20
|
||||
|
||||
@@ -3,7 +3,7 @@ from urllib.parse import urlparse
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from tacticalrmm.helpers import get_webdomain
|
||||
from tacticalrmm.util_settings import get_backend_url, get_root_domain, get_webdomain
|
||||
from tacticalrmm.utils import get_certs
|
||||
|
||||
|
||||
@@ -17,6 +17,8 @@ class Command(BaseCommand):
|
||||
match kwargs["name"]:
|
||||
case "api":
|
||||
self.stdout.write(settings.ALLOWED_HOSTS[0])
|
||||
case "rootdomain":
|
||||
self.stdout.write(get_root_domain(settings.ALLOWED_HOSTS[0]))
|
||||
case "version":
|
||||
self.stdout.write(settings.TRMM_VERSION)
|
||||
case "webversion":
|
||||
@@ -27,8 +29,16 @@ class Command(BaseCommand):
|
||||
self.stdout.write(settings.NATS_SERVER_VER)
|
||||
case "frontend":
|
||||
self.stdout.write(settings.CORS_ORIGIN_WHITELIST[0])
|
||||
case "backend_url":
|
||||
self.stdout.write(
|
||||
get_backend_url(
|
||||
settings.ALLOWED_HOSTS[0],
|
||||
settings.TRMM_PROTO,
|
||||
settings.TRMM_BACKEND_PORT,
|
||||
)
|
||||
)
|
||||
case "webdomain":
|
||||
self.stdout.write(get_webdomain())
|
||||
self.stdout.write(get_webdomain(settings.CORS_ORIGIN_WHITELIST[0]))
|
||||
case "djangoadmin":
|
||||
url = f"https://{settings.ALLOWED_HOSTS[0]}/{settings.ADMIN_URL}"
|
||||
self.stdout.write(url)
|
||||
|
||||
@@ -5,13 +5,14 @@ import websockets
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from core.utils import get_mesh_ws_url
|
||||
from tacticalrmm.constants import TRMM_WS_MAX_SIZE
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Sets up initial mesh central configuration"
|
||||
|
||||
async def websocket_call(self, uri):
|
||||
async with websockets.connect(uri) as websocket:
|
||||
async with websockets.connect(uri, max_size=TRMM_WS_MAX_SIZE) as websocket:
|
||||
# Get Invitation Link
|
||||
await websocket.send(
|
||||
json.dumps(
|
||||
|
||||
@@ -0,0 +1,19 @@
from django.core.management.base import BaseCommand
from meshctrl.utils import get_login_token

from core.utils import get_core_settings


class Command(BaseCommand):
    help = "generate a url to login to mesh as the superuser"

    def handle(self, *args, **kwargs):
        core = get_core_settings()

        token = get_login_token(key=core.mesh_token, user=f"user//{core.mesh_username}")
        token_param = f"login={token}&"

        control = f"{core.mesh_site}/?{token_param}"

        self.stdout.write(self.style.SUCCESS(control))
@@ -6,13 +6,14 @@ from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from core.utils import get_core_settings, get_mesh_ws_url
|
||||
from tacticalrmm.constants import TRMM_WS_MAX_SIZE
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Sets up initial mesh central configuration"
|
||||
|
||||
async def websocket_call(self, uri):
|
||||
async with websockets.connect(uri) as websocket:
|
||||
async with websockets.connect(uri, max_size=TRMM_WS_MAX_SIZE) as websocket:
|
||||
# Get Device groups to see if it exists
|
||||
await websocket.send(json.dumps({"action": "meshes"}))
|
||||
|
||||
|
||||
@@ -6,6 +6,8 @@ from accounts.models import User
|
||||
from agents.models import Agent
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check, CheckHistory
|
||||
from core.models import CoreSettings
|
||||
from core.tasks import remove_orphaned_history_results, sync_mesh_perms_task
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.constants import AGENT_DEFER, ScriptType
|
||||
|
||||
@@ -54,4 +56,22 @@ class Command(BaseCommand):
|
||||
|
||||
agent.save(update_fields=["goarch"])
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Checking for orphaned history results...")
|
||||
)
|
||||
count = remove_orphaned_history_results()
|
||||
if count:
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(f"Removed {count} orphaned history results.")
|
||||
)
|
||||
|
||||
core = CoreSettings.objects.first()
|
||||
if core.sync_mesh_with_trmm:
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
"Syncing trmm users/permissions with meshcentral, this might take a long time...please wait..."
|
||||
)
|
||||
)
|
||||
sync_mesh_perms_task()
|
||||
|
||||
self.stdout.write("Post update tasks finished")
|
||||
|
||||
@@ -8,6 +8,7 @@ from core.tasks import (
|
||||
core_maintenance_tasks,
|
||||
resolve_alerts_task,
|
||||
resolve_pending_actions,
|
||||
sync_mesh_perms_task,
|
||||
sync_scheduled_tasks,
|
||||
)
|
||||
from winupdate.tasks import auto_approve_updates_task, check_agent_update_schedule_task
|
||||
@@ -28,3 +29,4 @@ class Command(BaseCommand):
|
||||
remove_orphaned_win_tasks.delay()
|
||||
auto_approve_updates_task.delay()
|
||||
check_agent_update_schedule_task.delay()
|
||||
sync_mesh_perms_task.delay()
|
||||
|
||||
@@ -0,0 +1,15 @@
from django.core.management.base import BaseCommand

from core.tasks import sync_mesh_perms_task


class Command(BaseCommand):
    help = "Sync mesh users/perms with trmm users/perms"

    def handle(self, *args, **kwargs):
        self.stdout.write(
            self.style.SUCCESS(
                "Syncing trmm users/permissions with meshcentral, this might take a long time...please wait..."
            )
        )
        sync_mesh_perms_task()
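A brief usage note on the call above: inside this management command the Celery task function is invoked directly (synchronously), while the scheduled-tasks command earlier in this diff queues the same task with .delay(). A hedged sketch of the difference, assuming a standard Celery setup:

from core.tasks import sync_mesh_perms_task

sync_mesh_perms_task()        # runs in the current process and blocks until the sync finishes
sync_mesh_perms_task.delay()  # queues the task for a Celery worker and returns immediately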
183
api/tacticalrmm/core/mesh_utils.py
Normal file
@@ -0,0 +1,183 @@
|
||||
import asyncio
|
||||
import json
|
||||
import re
|
||||
import secrets
|
||||
import string
|
||||
import traceback
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import websockets
|
||||
|
||||
from accounts.utils import is_superuser
|
||||
from tacticalrmm.constants import TRMM_WS_MAX_SIZE
|
||||
from tacticalrmm.logger import logger
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from accounts.models import User
|
||||
|
||||
|
||||
def build_mesh_display_name(
|
||||
*, first_name: str | None, last_name: str | None, company_name: str | None
|
||||
) -> str:
|
||||
ret = ""
|
||||
if first_name:
|
||||
ret += first_name
|
||||
|
||||
if last_name:
|
||||
ret += f" {last_name}"
|
||||
|
||||
if ret and company_name:
|
||||
ret += f" - {company_name}"
|
||||
elif company_name:
|
||||
ret += company_name
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def has_mesh_perms(*, user: "User") -> bool:
|
||||
if user.is_superuser or is_superuser(user):
|
||||
return True
|
||||
|
||||
return user.role and getattr(user.role, "can_use_mesh")
|
||||
|
||||
|
||||
def make_mesh_password() -> str:
|
||||
alpha = string.ascii_letters + string.digits
|
||||
nonalpha = "!@#$"
|
||||
passwd = [secrets.choice(alpha) for _ in range(29)] + [secrets.choice(nonalpha)]
|
||||
secrets.SystemRandom().shuffle(passwd)
|
||||
return "".join(passwd)
|
||||
|
||||
|
||||
def transform_trmm(obj):
    ret = []
    try:
        for node in obj:
            node_id = node["node_id"]
            user_ids = [link["_id"] for link in node["links"]]
            ret.append({"node_id": node_id, "user_ids": user_ids})
    except Exception:
        logger.debug(traceback.format_exc())
    return ret


def transform_mesh(obj):
    pattern = re.compile(r".*___\d+")
    ret = []
    try:
        for _, nodes in obj.items():
            for node in nodes:
                node_id = node["_id"]
                try:
                    user_ids = [
                        user_id
                        for user_id in node["links"].keys()
                        if pattern.match(user_id)
                    ]
                except KeyError:
                    # will trigger on the initial sync because there are no mesh users yet
                    # also triggers for invalid agents after sync
                    pass
                else:
                    ret.append({"node_id": node_id, "user_ids": user_ids})

    except Exception:
        logger.debug(traceback.format_exc())
    return ret


class MeshSync:
|
||||
def __init__(self, uri: str):
|
||||
self.uri = uri
|
||||
self.mesh_users = self.get_trmm_mesh_users() # full list
|
||||
|
||||
def mesh_action(
|
||||
self, *, payload: dict[str, Any], wait=True
|
||||
) -> dict[str, Any] | None:
|
||||
async def _do(payload):
|
||||
async with websockets.connect(self.uri, max_size=TRMM_WS_MAX_SIZE) as ws:
|
||||
await ws.send(json.dumps(payload))
|
||||
if wait:
|
||||
while 1:
|
||||
try:
|
||||
message = await asyncio.wait_for(ws.recv(), 120)
|
||||
r = json.loads(message)
|
||||
if r["action"] == payload["action"]:
|
||||
return r
|
||||
except asyncio.TimeoutError:
|
||||
logger.error("Timeout reached.")
|
||||
return None
|
||||
else:
|
||||
return None
|
||||
|
||||
payload["responseid"] = "meshctrl"
|
||||
logger.debug(payload)
|
||||
|
||||
return asyncio.run(_do(payload))
|
||||
|
||||
def get_unique_mesh_users(
|
||||
self, trmm_agents_list: list[dict[str, Any]]
|
||||
) -> list[str]:
|
||||
userids = [i["links"] for i in trmm_agents_list]
|
||||
all_ids = [item["_id"] for sublist in userids for item in sublist]
|
||||
return list(set(all_ids))
|
||||
|
||||
def get_trmm_mesh_users(self):
|
||||
payload = {"action": "users"}
|
||||
ret = {
|
||||
i["_id"]: i
|
||||
for i in self.mesh_action(payload=payload, wait=True)["users"]
|
||||
if re.search(r".*___\d+", i["_id"])
|
||||
}
|
||||
return ret
|
||||
|
||||
def add_users_to_node(self, *, node_id: str, user_ids: list[str]):
|
||||
|
||||
payload = {
|
||||
"action": "adddeviceuser",
|
||||
"nodeid": node_id,
|
||||
"usernames": [s.replace("user//", "") for s in user_ids],
|
||||
"rights": 4088024,
|
||||
"remove": False,
|
||||
}
|
||||
self.mesh_action(payload=payload, wait=False)
|
||||
|
||||
def delete_users_from_node(self, *, node_id: str, user_ids: list[str]):
|
||||
payload = {
|
||||
"action": "adddeviceuser",
|
||||
"nodeid": node_id,
|
||||
"userids": user_ids,
|
||||
"rights": 0,
|
||||
"remove": True,
|
||||
}
|
||||
self.mesh_action(payload=payload, wait=False)
|
||||
|
||||
def update_mesh_displayname(self, *, user_info: dict[str, Any]) -> None:
|
||||
payload = {
|
||||
"action": "edituser",
|
||||
"id": user_info["_id"],
|
||||
"realname": user_info["full_name"],
|
||||
}
|
||||
self.mesh_action(payload=payload, wait=False)
|
||||
|
||||
def add_user_to_mesh(self, *, user_info: dict[str, Any]) -> None:
|
||||
payload = {
|
||||
"action": "adduser",
|
||||
"username": user_info["username"],
|
||||
"email": user_info["email"],
|
||||
"pass": make_mesh_password(),
|
||||
"resetNextLogin": False,
|
||||
"randomPassword": False,
|
||||
"removeEvents": False,
|
||||
"emailVerified": True,
|
||||
}
|
||||
self.mesh_action(payload=payload, wait=False)
|
||||
if user_info["full_name"]:
|
||||
self.update_mesh_displayname(user_info=user_info)
|
||||
|
||||
def delete_user_from_mesh(self, *, mesh_user_id: str) -> None:
|
||||
payload = {
|
||||
"action": "deleteuser",
|
||||
"userid": mesh_user_id,
|
||||
}
|
||||
self.mesh_action(payload=payload, wait=False)
|
||||
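The two transform_* helpers above normalize TRMM and MeshCentral data into the same shape, a list of {"node_id": ..., "user_ids": [...]} dicts. For illustration, a minimal sketch (not part of the diff; the real sync task later in this changeset may work differently) of how those two shapes could be compared to decide which users to add to or remove from each node:

def diff_node_users(trmm_nodes, mesh_nodes):
    # map node_id -> set of user ids currently present on the mesh side
    mesh_map = {n["node_id"]: set(n["user_ids"]) for n in mesh_nodes}
    changes = []
    for node in trmm_nodes:
        wanted = set(node["user_ids"])
        current = mesh_map.get(node["node_id"], set())
        to_add = sorted(wanted - current)
        to_remove = sorted(current - wanted)
        if to_add or to_remove:
            changes.append(
                {"node_id": node["node_id"], "add": to_add, "remove": to_remove}
            )
    return changes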
@@ -0,0 +1,632 @@
|
||||
# Generated by Django 4.2.7 on 2023-11-09 19:56
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("core", "0037_coresettings_open_ai_model_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="coresettings",
|
||||
name="default_time_zone",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("Africa/Abidjan", "Africa/Abidjan"),
|
||||
("Africa/Accra", "Africa/Accra"),
|
||||
("Africa/Addis_Ababa", "Africa/Addis_Ababa"),
|
||||
("Africa/Algiers", "Africa/Algiers"),
|
||||
("Africa/Asmara", "Africa/Asmara"),
|
||||
("Africa/Asmera", "Africa/Asmera"),
|
||||
("Africa/Bamako", "Africa/Bamako"),
|
||||
("Africa/Bangui", "Africa/Bangui"),
|
||||
("Africa/Banjul", "Africa/Banjul"),
|
||||
("Africa/Bissau", "Africa/Bissau"),
|
||||
("Africa/Blantyre", "Africa/Blantyre"),
|
||||
("Africa/Brazzaville", "Africa/Brazzaville"),
|
||||
("Africa/Bujumbura", "Africa/Bujumbura"),
|
||||
("Africa/Cairo", "Africa/Cairo"),
|
||||
("Africa/Casablanca", "Africa/Casablanca"),
|
||||
("Africa/Ceuta", "Africa/Ceuta"),
|
||||
("Africa/Conakry", "Africa/Conakry"),
|
||||
("Africa/Dakar", "Africa/Dakar"),
|
||||
("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"),
|
||||
("Africa/Djibouti", "Africa/Djibouti"),
|
||||
("Africa/Douala", "Africa/Douala"),
|
||||
("Africa/El_Aaiun", "Africa/El_Aaiun"),
|
||||
("Africa/Freetown", "Africa/Freetown"),
|
||||
("Africa/Gaborone", "Africa/Gaborone"),
|
||||
("Africa/Harare", "Africa/Harare"),
|
||||
("Africa/Johannesburg", "Africa/Johannesburg"),
|
||||
("Africa/Juba", "Africa/Juba"),
|
||||
("Africa/Kampala", "Africa/Kampala"),
|
||||
("Africa/Khartoum", "Africa/Khartoum"),
|
||||
("Africa/Kigali", "Africa/Kigali"),
|
||||
("Africa/Kinshasa", "Africa/Kinshasa"),
|
||||
("Africa/Lagos", "Africa/Lagos"),
|
||||
("Africa/Libreville", "Africa/Libreville"),
|
||||
("Africa/Lome", "Africa/Lome"),
|
||||
("Africa/Luanda", "Africa/Luanda"),
|
||||
("Africa/Lubumbashi", "Africa/Lubumbashi"),
|
||||
("Africa/Lusaka", "Africa/Lusaka"),
|
||||
("Africa/Malabo", "Africa/Malabo"),
|
||||
("Africa/Maputo", "Africa/Maputo"),
|
||||
("Africa/Maseru", "Africa/Maseru"),
|
||||
("Africa/Mbabane", "Africa/Mbabane"),
|
||||
("Africa/Mogadishu", "Africa/Mogadishu"),
|
||||
("Africa/Monrovia", "Africa/Monrovia"),
|
||||
("Africa/Nairobi", "Africa/Nairobi"),
|
||||
("Africa/Ndjamena", "Africa/Ndjamena"),
|
||||
("Africa/Niamey", "Africa/Niamey"),
|
||||
("Africa/Nouakchott", "Africa/Nouakchott"),
|
||||
("Africa/Ouagadougou", "Africa/Ouagadougou"),
|
||||
("Africa/Porto-Novo", "Africa/Porto-Novo"),
|
||||
("Africa/Sao_Tome", "Africa/Sao_Tome"),
|
||||
("Africa/Timbuktu", "Africa/Timbuktu"),
|
||||
("Africa/Tripoli", "Africa/Tripoli"),
|
||||
("Africa/Tunis", "Africa/Tunis"),
|
||||
("Africa/Windhoek", "Africa/Windhoek"),
|
||||
("America/Adak", "America/Adak"),
|
||||
("America/Anchorage", "America/Anchorage"),
|
||||
("America/Anguilla", "America/Anguilla"),
|
||||
("America/Antigua", "America/Antigua"),
|
||||
("America/Araguaina", "America/Araguaina"),
|
||||
(
|
||||
"America/Argentina/Buenos_Aires",
|
||||
"America/Argentina/Buenos_Aires",
|
||||
),
|
||||
("America/Argentina/Catamarca", "America/Argentina/Catamarca"),
|
||||
(
|
||||
"America/Argentina/ComodRivadavia",
|
||||
"America/Argentina/ComodRivadavia",
|
||||
),
|
||||
("America/Argentina/Cordoba", "America/Argentina/Cordoba"),
|
||||
("America/Argentina/Jujuy", "America/Argentina/Jujuy"),
|
||||
("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"),
|
||||
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
|
||||
(
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
),
|
||||
("America/Argentina/Salta", "America/Argentina/Salta"),
|
||||
("America/Argentina/San_Juan", "America/Argentina/San_Juan"),
|
||||
("America/Argentina/San_Luis", "America/Argentina/San_Luis"),
|
||||
("America/Argentina/Tucuman", "America/Argentina/Tucuman"),
|
||||
("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"),
|
||||
("America/Aruba", "America/Aruba"),
|
||||
("America/Asuncion", "America/Asuncion"),
|
||||
("America/Atikokan", "America/Atikokan"),
|
||||
("America/Atka", "America/Atka"),
|
||||
("America/Bahia", "America/Bahia"),
|
||||
("America/Bahia_Banderas", "America/Bahia_Banderas"),
|
||||
("America/Barbados", "America/Barbados"),
|
||||
("America/Belem", "America/Belem"),
|
||||
("America/Belize", "America/Belize"),
|
||||
("America/Blanc-Sablon", "America/Blanc-Sablon"),
|
||||
("America/Boa_Vista", "America/Boa_Vista"),
|
||||
("America/Bogota", "America/Bogota"),
|
||||
("America/Boise", "America/Boise"),
|
||||
("America/Buenos_Aires", "America/Buenos_Aires"),
|
||||
("America/Cambridge_Bay", "America/Cambridge_Bay"),
|
||||
("America/Campo_Grande", "America/Campo_Grande"),
|
||||
("America/Cancun", "America/Cancun"),
|
||||
("America/Caracas", "America/Caracas"),
|
||||
("America/Catamarca", "America/Catamarca"),
|
||||
("America/Cayenne", "America/Cayenne"),
|
||||
("America/Cayman", "America/Cayman"),
|
||||
("America/Chicago", "America/Chicago"),
|
||||
("America/Chihuahua", "America/Chihuahua"),
|
||||
("America/Ciudad_Juarez", "America/Ciudad_Juarez"),
|
||||
("America/Coral_Harbour", "America/Coral_Harbour"),
|
||||
("America/Cordoba", "America/Cordoba"),
|
||||
("America/Costa_Rica", "America/Costa_Rica"),
|
||||
("America/Creston", "America/Creston"),
|
||||
("America/Cuiaba", "America/Cuiaba"),
|
||||
("America/Curacao", "America/Curacao"),
|
||||
("America/Danmarkshavn", "America/Danmarkshavn"),
|
||||
("America/Dawson", "America/Dawson"),
|
||||
("America/Dawson_Creek", "America/Dawson_Creek"),
|
||||
("America/Denver", "America/Denver"),
|
||||
("America/Detroit", "America/Detroit"),
|
||||
("America/Dominica", "America/Dominica"),
|
||||
("America/Edmonton", "America/Edmonton"),
|
||||
("America/Eirunepe", "America/Eirunepe"),
|
||||
("America/El_Salvador", "America/El_Salvador"),
|
||||
("America/Ensenada", "America/Ensenada"),
|
||||
("America/Fort_Nelson", "America/Fort_Nelson"),
|
||||
("America/Fort_Wayne", "America/Fort_Wayne"),
|
||||
("America/Fortaleza", "America/Fortaleza"),
|
||||
("America/Glace_Bay", "America/Glace_Bay"),
|
||||
("America/Godthab", "America/Godthab"),
|
||||
("America/Goose_Bay", "America/Goose_Bay"),
|
||||
("America/Grand_Turk", "America/Grand_Turk"),
|
||||
("America/Grenada", "America/Grenada"),
|
||||
("America/Guadeloupe", "America/Guadeloupe"),
|
||||
("America/Guatemala", "America/Guatemala"),
|
||||
("America/Guayaquil", "America/Guayaquil"),
|
||||
("America/Guyana", "America/Guyana"),
|
||||
("America/Halifax", "America/Halifax"),
|
||||
("America/Havana", "America/Havana"),
|
||||
("America/Hermosillo", "America/Hermosillo"),
|
||||
("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"),
|
||||
("America/Indiana/Knox", "America/Indiana/Knox"),
|
||||
("America/Indiana/Marengo", "America/Indiana/Marengo"),
|
||||
("America/Indiana/Petersburg", "America/Indiana/Petersburg"),
|
||||
("America/Indiana/Tell_City", "America/Indiana/Tell_City"),
|
||||
("America/Indiana/Vevay", "America/Indiana/Vevay"),
|
||||
("America/Indiana/Vincennes", "America/Indiana/Vincennes"),
|
||||
("America/Indiana/Winamac", "America/Indiana/Winamac"),
|
||||
("America/Indianapolis", "America/Indianapolis"),
|
||||
("America/Inuvik", "America/Inuvik"),
|
||||
("America/Iqaluit", "America/Iqaluit"),
|
||||
("America/Jamaica", "America/Jamaica"),
|
||||
("America/Jujuy", "America/Jujuy"),
|
||||
("America/Juneau", "America/Juneau"),
|
||||
("America/Kentucky/Louisville", "America/Kentucky/Louisville"),
|
||||
("America/Kentucky/Monticello", "America/Kentucky/Monticello"),
|
||||
("America/Knox_IN", "America/Knox_IN"),
|
||||
("America/Kralendijk", "America/Kralendijk"),
|
||||
("America/La_Paz", "America/La_Paz"),
|
||||
("America/Lima", "America/Lima"),
|
||||
("America/Los_Angeles", "America/Los_Angeles"),
|
||||
("America/Louisville", "America/Louisville"),
|
||||
("America/Lower_Princes", "America/Lower_Princes"),
|
||||
("America/Maceio", "America/Maceio"),
|
||||
("America/Managua", "America/Managua"),
|
||||
("America/Manaus", "America/Manaus"),
|
||||
("America/Marigot", "America/Marigot"),
|
||||
("America/Martinique", "America/Martinique"),
|
||||
("America/Matamoros", "America/Matamoros"),
|
||||
("America/Mazatlan", "America/Mazatlan"),
|
||||
("America/Mendoza", "America/Mendoza"),
|
||||
("America/Menominee", "America/Menominee"),
|
||||
("America/Merida", "America/Merida"),
|
||||
("America/Metlakatla", "America/Metlakatla"),
|
||||
("America/Mexico_City", "America/Mexico_City"),
|
||||
("America/Miquelon", "America/Miquelon"),
|
||||
("America/Moncton", "America/Moncton"),
|
||||
("America/Monterrey", "America/Monterrey"),
|
||||
("America/Montevideo", "America/Montevideo"),
|
||||
("America/Montreal", "America/Montreal"),
|
||||
("America/Montserrat", "America/Montserrat"),
|
||||
("America/Nassau", "America/Nassau"),
|
||||
("America/New_York", "America/New_York"),
|
||||
("America/Nipigon", "America/Nipigon"),
|
||||
("America/Nome", "America/Nome"),
|
||||
("America/Noronha", "America/Noronha"),
|
||||
("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"),
|
||||
("America/North_Dakota/Center", "America/North_Dakota/Center"),
|
||||
(
|
||||
"America/North_Dakota/New_Salem",
|
||||
"America/North_Dakota/New_Salem",
|
||||
),
|
||||
("America/Nuuk", "America/Nuuk"),
|
||||
("America/Ojinaga", "America/Ojinaga"),
|
||||
("America/Panama", "America/Panama"),
|
||||
("America/Pangnirtung", "America/Pangnirtung"),
|
||||
("America/Paramaribo", "America/Paramaribo"),
|
||||
("America/Phoenix", "America/Phoenix"),
|
||||
("America/Port-au-Prince", "America/Port-au-Prince"),
|
||||
("America/Port_of_Spain", "America/Port_of_Spain"),
|
||||
("America/Porto_Acre", "America/Porto_Acre"),
|
||||
("America/Porto_Velho", "America/Porto_Velho"),
|
||||
("America/Puerto_Rico", "America/Puerto_Rico"),
|
||||
("America/Punta_Arenas", "America/Punta_Arenas"),
|
||||
("America/Rainy_River", "America/Rainy_River"),
|
||||
("America/Rankin_Inlet", "America/Rankin_Inlet"),
|
||||
("America/Recife", "America/Recife"),
|
||||
("America/Regina", "America/Regina"),
|
||||
("America/Resolute", "America/Resolute"),
|
||||
("America/Rio_Branco", "America/Rio_Branco"),
|
||||
("America/Rosario", "America/Rosario"),
|
||||
("America/Santa_Isabel", "America/Santa_Isabel"),
|
||||
("America/Santarem", "America/Santarem"),
|
||||
("America/Santiago", "America/Santiago"),
|
||||
("America/Santo_Domingo", "America/Santo_Domingo"),
|
||||
("America/Sao_Paulo", "America/Sao_Paulo"),
|
||||
("America/Scoresbysund", "America/Scoresbysund"),
|
||||
("America/Shiprock", "America/Shiprock"),
|
||||
("America/Sitka", "America/Sitka"),
|
||||
("America/St_Barthelemy", "America/St_Barthelemy"),
|
||||
("America/St_Johns", "America/St_Johns"),
|
||||
("America/St_Kitts", "America/St_Kitts"),
|
||||
("America/St_Lucia", "America/St_Lucia"),
|
||||
("America/St_Thomas", "America/St_Thomas"),
|
||||
("America/St_Vincent", "America/St_Vincent"),
|
||||
("America/Swift_Current", "America/Swift_Current"),
|
||||
("America/Tegucigalpa", "America/Tegucigalpa"),
|
||||
("America/Thule", "America/Thule"),
|
||||
("America/Thunder_Bay", "America/Thunder_Bay"),
|
||||
("America/Tijuana", "America/Tijuana"),
|
||||
("America/Toronto", "America/Toronto"),
|
||||
("America/Tortola", "America/Tortola"),
|
||||
("America/Vancouver", "America/Vancouver"),
|
||||
("America/Virgin", "America/Virgin"),
|
||||
("America/Whitehorse", "America/Whitehorse"),
|
||||
("America/Winnipeg", "America/Winnipeg"),
|
||||
("America/Yakutat", "America/Yakutat"),
|
||||
("America/Yellowknife", "America/Yellowknife"),
|
||||
("Antarctica/Casey", "Antarctica/Casey"),
|
||||
("Antarctica/Davis", "Antarctica/Davis"),
|
||||
("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"),
|
||||
("Antarctica/Macquarie", "Antarctica/Macquarie"),
|
||||
("Antarctica/Mawson", "Antarctica/Mawson"),
|
||||
("Antarctica/McMurdo", "Antarctica/McMurdo"),
|
||||
("Antarctica/Palmer", "Antarctica/Palmer"),
|
||||
("Antarctica/Rothera", "Antarctica/Rothera"),
|
||||
("Antarctica/South_Pole", "Antarctica/South_Pole"),
|
||||
("Antarctica/Syowa", "Antarctica/Syowa"),
|
||||
("Antarctica/Troll", "Antarctica/Troll"),
|
||||
("Antarctica/Vostok", "Antarctica/Vostok"),
|
||||
("Arctic/Longyearbyen", "Arctic/Longyearbyen"),
|
||||
("Asia/Aden", "Asia/Aden"),
|
||||
("Asia/Almaty", "Asia/Almaty"),
|
||||
("Asia/Amman", "Asia/Amman"),
|
||||
("Asia/Anadyr", "Asia/Anadyr"),
|
||||
("Asia/Aqtau", "Asia/Aqtau"),
|
||||
("Asia/Aqtobe", "Asia/Aqtobe"),
|
||||
("Asia/Ashgabat", "Asia/Ashgabat"),
|
||||
("Asia/Ashkhabad", "Asia/Ashkhabad"),
|
||||
("Asia/Atyrau", "Asia/Atyrau"),
|
||||
("Asia/Baghdad", "Asia/Baghdad"),
|
||||
("Asia/Bahrain", "Asia/Bahrain"),
|
||||
("Asia/Baku", "Asia/Baku"),
|
||||
("Asia/Bangkok", "Asia/Bangkok"),
|
||||
("Asia/Barnaul", "Asia/Barnaul"),
|
||||
("Asia/Beirut", "Asia/Beirut"),
|
||||
("Asia/Bishkek", "Asia/Bishkek"),
|
||||
("Asia/Brunei", "Asia/Brunei"),
|
||||
("Asia/Calcutta", "Asia/Calcutta"),
|
||||
("Asia/Chita", "Asia/Chita"),
|
||||
("Asia/Choibalsan", "Asia/Choibalsan"),
|
||||
("Asia/Chongqing", "Asia/Chongqing"),
|
||||
("Asia/Chungking", "Asia/Chungking"),
|
||||
("Asia/Colombo", "Asia/Colombo"),
|
||||
("Asia/Dacca", "Asia/Dacca"),
|
||||
("Asia/Damascus", "Asia/Damascus"),
|
||||
("Asia/Dhaka", "Asia/Dhaka"),
|
||||
("Asia/Dili", "Asia/Dili"),
|
||||
("Asia/Dubai", "Asia/Dubai"),
|
||||
("Asia/Dushanbe", "Asia/Dushanbe"),
|
||||
("Asia/Famagusta", "Asia/Famagusta"),
|
||||
("Asia/Gaza", "Asia/Gaza"),
|
||||
("Asia/Harbin", "Asia/Harbin"),
|
||||
("Asia/Hebron", "Asia/Hebron"),
|
||||
("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"),
|
||||
("Asia/Hong_Kong", "Asia/Hong_Kong"),
|
||||
("Asia/Hovd", "Asia/Hovd"),
|
||||
("Asia/Irkutsk", "Asia/Irkutsk"),
|
||||
("Asia/Istanbul", "Asia/Istanbul"),
|
||||
("Asia/Jakarta", "Asia/Jakarta"),
|
||||
("Asia/Jayapura", "Asia/Jayapura"),
|
||||
("Asia/Jerusalem", "Asia/Jerusalem"),
|
||||
("Asia/Kabul", "Asia/Kabul"),
|
||||
("Asia/Kamchatka", "Asia/Kamchatka"),
|
||||
("Asia/Karachi", "Asia/Karachi"),
|
||||
("Asia/Kashgar", "Asia/Kashgar"),
|
||||
("Asia/Kathmandu", "Asia/Kathmandu"),
|
||||
("Asia/Katmandu", "Asia/Katmandu"),
|
||||
("Asia/Khandyga", "Asia/Khandyga"),
|
||||
("Asia/Kolkata", "Asia/Kolkata"),
|
||||
("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"),
|
||||
("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"),
|
||||
("Asia/Kuching", "Asia/Kuching"),
|
||||
("Asia/Kuwait", "Asia/Kuwait"),
|
||||
("Asia/Macao", "Asia/Macao"),
|
||||
("Asia/Macau", "Asia/Macau"),
|
||||
("Asia/Magadan", "Asia/Magadan"),
|
||||
("Asia/Makassar", "Asia/Makassar"),
|
||||
("Asia/Manila", "Asia/Manila"),
|
||||
("Asia/Muscat", "Asia/Muscat"),
|
||||
("Asia/Nicosia", "Asia/Nicosia"),
|
||||
("Asia/Novokuznetsk", "Asia/Novokuznetsk"),
|
||||
("Asia/Novosibirsk", "Asia/Novosibirsk"),
|
||||
("Asia/Omsk", "Asia/Omsk"),
|
||||
("Asia/Oral", "Asia/Oral"),
|
||||
("Asia/Phnom_Penh", "Asia/Phnom_Penh"),
|
||||
("Asia/Pontianak", "Asia/Pontianak"),
|
||||
("Asia/Pyongyang", "Asia/Pyongyang"),
|
||||
("Asia/Qatar", "Asia/Qatar"),
|
||||
("Asia/Qostanay", "Asia/Qostanay"),
|
||||
("Asia/Qyzylorda", "Asia/Qyzylorda"),
|
||||
("Asia/Rangoon", "Asia/Rangoon"),
|
||||
("Asia/Riyadh", "Asia/Riyadh"),
|
||||
("Asia/Saigon", "Asia/Saigon"),
|
||||
("Asia/Sakhalin", "Asia/Sakhalin"),
|
||||
("Asia/Samarkand", "Asia/Samarkand"),
|
||||
("Asia/Seoul", "Asia/Seoul"),
|
||||
("Asia/Shanghai", "Asia/Shanghai"),
|
||||
("Asia/Singapore", "Asia/Singapore"),
|
||||
("Asia/Srednekolymsk", "Asia/Srednekolymsk"),
|
||||
("Asia/Taipei", "Asia/Taipei"),
|
||||
("Asia/Tashkent", "Asia/Tashkent"),
|
||||
("Asia/Tbilisi", "Asia/Tbilisi"),
|
||||
("Asia/Tehran", "Asia/Tehran"),
|
||||
("Asia/Tel_Aviv", "Asia/Tel_Aviv"),
|
||||
("Asia/Thimbu", "Asia/Thimbu"),
|
||||
("Asia/Thimphu", "Asia/Thimphu"),
|
||||
("Asia/Tokyo", "Asia/Tokyo"),
|
||||
("Asia/Tomsk", "Asia/Tomsk"),
|
||||
("Asia/Ujung_Pandang", "Asia/Ujung_Pandang"),
|
||||
("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"),
|
||||
("Asia/Ulan_Bator", "Asia/Ulan_Bator"),
|
||||
("Asia/Urumqi", "Asia/Urumqi"),
|
||||
("Asia/Ust-Nera", "Asia/Ust-Nera"),
|
||||
("Asia/Vientiane", "Asia/Vientiane"),
|
||||
("Asia/Vladivostok", "Asia/Vladivostok"),
|
||||
("Asia/Yakutsk", "Asia/Yakutsk"),
|
||||
("Asia/Yangon", "Asia/Yangon"),
|
||||
("Asia/Yekaterinburg", "Asia/Yekaterinburg"),
|
||||
("Asia/Yerevan", "Asia/Yerevan"),
|
||||
("Atlantic/Azores", "Atlantic/Azores"),
|
||||
("Atlantic/Bermuda", "Atlantic/Bermuda"),
|
||||
("Atlantic/Canary", "Atlantic/Canary"),
|
||||
("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"),
|
||||
("Atlantic/Faeroe", "Atlantic/Faeroe"),
|
||||
("Atlantic/Faroe", "Atlantic/Faroe"),
|
||||
("Atlantic/Jan_Mayen", "Atlantic/Jan_Mayen"),
|
||||
("Atlantic/Madeira", "Atlantic/Madeira"),
|
||||
("Atlantic/Reykjavik", "Atlantic/Reykjavik"),
|
||||
("Atlantic/South_Georgia", "Atlantic/South_Georgia"),
|
||||
("Atlantic/St_Helena", "Atlantic/St_Helena"),
|
||||
("Atlantic/Stanley", "Atlantic/Stanley"),
|
||||
("Australia/ACT", "Australia/ACT"),
|
||||
("Australia/Adelaide", "Australia/Adelaide"),
|
||||
("Australia/Brisbane", "Australia/Brisbane"),
|
||||
("Australia/Broken_Hill", "Australia/Broken_Hill"),
|
||||
("Australia/Canberra", "Australia/Canberra"),
|
||||
("Australia/Currie", "Australia/Currie"),
|
||||
("Australia/Darwin", "Australia/Darwin"),
|
||||
("Australia/Eucla", "Australia/Eucla"),
|
||||
("Australia/Hobart", "Australia/Hobart"),
|
||||
("Australia/LHI", "Australia/LHI"),
|
||||
("Australia/Lindeman", "Australia/Lindeman"),
|
||||
("Australia/Lord_Howe", "Australia/Lord_Howe"),
|
||||
("Australia/Melbourne", "Australia/Melbourne"),
|
||||
("Australia/NSW", "Australia/NSW"),
|
||||
("Australia/North", "Australia/North"),
|
||||
("Australia/Perth", "Australia/Perth"),
|
||||
("Australia/Queensland", "Australia/Queensland"),
|
||||
("Australia/South", "Australia/South"),
|
||||
("Australia/Sydney", "Australia/Sydney"),
|
||||
("Australia/Tasmania", "Australia/Tasmania"),
|
||||
("Australia/Victoria", "Australia/Victoria"),
|
||||
("Australia/West", "Australia/West"),
|
||||
("Australia/Yancowinna", "Australia/Yancowinna"),
|
||||
("Brazil/Acre", "Brazil/Acre"),
|
||||
("Brazil/DeNoronha", "Brazil/DeNoronha"),
|
||||
("Brazil/East", "Brazil/East"),
|
||||
("Brazil/West", "Brazil/West"),
|
||||
("CET", "CET"),
|
||||
("CST6CDT", "CST6CDT"),
|
||||
("Canada/Atlantic", "Canada/Atlantic"),
|
||||
("Canada/Central", "Canada/Central"),
|
||||
("Canada/Eastern", "Canada/Eastern"),
|
||||
("Canada/Mountain", "Canada/Mountain"),
|
||||
("Canada/Newfoundland", "Canada/Newfoundland"),
|
||||
("Canada/Pacific", "Canada/Pacific"),
|
||||
("Canada/Saskatchewan", "Canada/Saskatchewan"),
|
||||
("Canada/Yukon", "Canada/Yukon"),
|
||||
("Chile/Continental", "Chile/Continental"),
|
||||
("Chile/EasterIsland", "Chile/EasterIsland"),
|
||||
("Cuba", "Cuba"),
|
||||
("EET", "EET"),
|
||||
("EST", "EST"),
|
||||
("EST5EDT", "EST5EDT"),
|
||||
("Egypt", "Egypt"),
|
||||
("Eire", "Eire"),
|
||||
("Etc/GMT", "Etc/GMT"),
|
||||
("Etc/GMT+0", "Etc/GMT+0"),
|
||||
("Etc/GMT+1", "Etc/GMT+1"),
|
||||
("Etc/GMT+10", "Etc/GMT+10"),
|
||||
("Etc/GMT+11", "Etc/GMT+11"),
|
||||
("Etc/GMT+12", "Etc/GMT+12"),
|
||||
("Etc/GMT+2", "Etc/GMT+2"),
|
||||
("Etc/GMT+3", "Etc/GMT+3"),
|
||||
("Etc/GMT+4", "Etc/GMT+4"),
|
||||
("Etc/GMT+5", "Etc/GMT+5"),
|
||||
("Etc/GMT+6", "Etc/GMT+6"),
|
||||
("Etc/GMT+7", "Etc/GMT+7"),
|
||||
("Etc/GMT+8", "Etc/GMT+8"),
|
||||
("Etc/GMT+9", "Etc/GMT+9"),
|
||||
("Etc/GMT-0", "Etc/GMT-0"),
|
||||
("Etc/GMT-1", "Etc/GMT-1"),
|
||||
("Etc/GMT-10", "Etc/GMT-10"),
|
||||
("Etc/GMT-11", "Etc/GMT-11"),
|
||||
("Etc/GMT-12", "Etc/GMT-12"),
|
||||
("Etc/GMT-13", "Etc/GMT-13"),
|
||||
("Etc/GMT-14", "Etc/GMT-14"),
|
||||
("Etc/GMT-2", "Etc/GMT-2"),
|
||||
("Etc/GMT-3", "Etc/GMT-3"),
|
||||
("Etc/GMT-4", "Etc/GMT-4"),
|
||||
("Etc/GMT-5", "Etc/GMT-5"),
|
||||
("Etc/GMT-6", "Etc/GMT-6"),
|
||||
("Etc/GMT-7", "Etc/GMT-7"),
|
||||
("Etc/GMT-8", "Etc/GMT-8"),
|
||||
("Etc/GMT-9", "Etc/GMT-9"),
|
||||
("Etc/GMT0", "Etc/GMT0"),
|
||||
("Etc/Greenwich", "Etc/Greenwich"),
|
||||
("Etc/UCT", "Etc/UCT"),
|
||||
("Etc/UTC", "Etc/UTC"),
|
||||
("Etc/Universal", "Etc/Universal"),
|
||||
("Etc/Zulu", "Etc/Zulu"),
|
||||
("Europe/Amsterdam", "Europe/Amsterdam"),
|
||||
("Europe/Andorra", "Europe/Andorra"),
|
||||
("Europe/Astrakhan", "Europe/Astrakhan"),
|
||||
("Europe/Athens", "Europe/Athens"),
|
||||
("Europe/Belfast", "Europe/Belfast"),
|
||||
("Europe/Belgrade", "Europe/Belgrade"),
|
||||
("Europe/Berlin", "Europe/Berlin"),
|
||||
("Europe/Bratislava", "Europe/Bratislava"),
|
||||
("Europe/Brussels", "Europe/Brussels"),
|
||||
("Europe/Bucharest", "Europe/Bucharest"),
|
||||
("Europe/Budapest", "Europe/Budapest"),
|
||||
("Europe/Busingen", "Europe/Busingen"),
|
||||
("Europe/Chisinau", "Europe/Chisinau"),
|
||||
("Europe/Copenhagen", "Europe/Copenhagen"),
|
||||
("Europe/Dublin", "Europe/Dublin"),
|
||||
("Europe/Gibraltar", "Europe/Gibraltar"),
|
||||
("Europe/Guernsey", "Europe/Guernsey"),
|
||||
("Europe/Helsinki", "Europe/Helsinki"),
|
||||
("Europe/Isle_of_Man", "Europe/Isle_of_Man"),
|
||||
("Europe/Istanbul", "Europe/Istanbul"),
|
||||
("Europe/Jersey", "Europe/Jersey"),
|
||||
("Europe/Kaliningrad", "Europe/Kaliningrad"),
|
||||
("Europe/Kiev", "Europe/Kiev"),
|
||||
("Europe/Kirov", "Europe/Kirov"),
|
||||
("Europe/Kyiv", "Europe/Kyiv"),
|
||||
("Europe/Lisbon", "Europe/Lisbon"),
|
||||
("Europe/Ljubljana", "Europe/Ljubljana"),
|
||||
("Europe/London", "Europe/London"),
|
||||
("Europe/Luxembourg", "Europe/Luxembourg"),
|
||||
("Europe/Madrid", "Europe/Madrid"),
|
||||
("Europe/Malta", "Europe/Malta"),
|
||||
("Europe/Mariehamn", "Europe/Mariehamn"),
|
||||
("Europe/Minsk", "Europe/Minsk"),
|
||||
("Europe/Monaco", "Europe/Monaco"),
|
||||
("Europe/Moscow", "Europe/Moscow"),
|
||||
("Europe/Nicosia", "Europe/Nicosia"),
|
||||
("Europe/Oslo", "Europe/Oslo"),
|
||||
("Europe/Paris", "Europe/Paris"),
|
||||
("Europe/Podgorica", "Europe/Podgorica"),
|
||||
("Europe/Prague", "Europe/Prague"),
|
||||
("Europe/Riga", "Europe/Riga"),
|
||||
("Europe/Rome", "Europe/Rome"),
|
||||
("Europe/Samara", "Europe/Samara"),
|
||||
("Europe/San_Marino", "Europe/San_Marino"),
|
||||
("Europe/Sarajevo", "Europe/Sarajevo"),
|
||||
("Europe/Saratov", "Europe/Saratov"),
|
||||
("Europe/Simferopol", "Europe/Simferopol"),
|
||||
("Europe/Skopje", "Europe/Skopje"),
|
||||
("Europe/Sofia", "Europe/Sofia"),
|
||||
("Europe/Stockholm", "Europe/Stockholm"),
|
||||
("Europe/Tallinn", "Europe/Tallinn"),
|
||||
("Europe/Tirane", "Europe/Tirane"),
|
||||
("Europe/Tiraspol", "Europe/Tiraspol"),
|
||||
("Europe/Ulyanovsk", "Europe/Ulyanovsk"),
|
||||
("Europe/Uzhgorod", "Europe/Uzhgorod"),
|
||||
("Europe/Vaduz", "Europe/Vaduz"),
|
||||
("Europe/Vatican", "Europe/Vatican"),
|
||||
("Europe/Vienna", "Europe/Vienna"),
|
||||
("Europe/Vilnius", "Europe/Vilnius"),
|
||||
("Europe/Volgograd", "Europe/Volgograd"),
|
||||
("Europe/Warsaw", "Europe/Warsaw"),
|
||||
("Europe/Zagreb", "Europe/Zagreb"),
|
||||
("Europe/Zaporozhye", "Europe/Zaporozhye"),
|
||||
("Europe/Zurich", "Europe/Zurich"),
|
||||
("Factory", "Factory"),
|
||||
("GB", "GB"),
|
||||
("GB-Eire", "GB-Eire"),
|
||||
("GMT", "GMT"),
|
||||
("GMT+0", "GMT+0"),
|
||||
("GMT-0", "GMT-0"),
|
||||
("GMT0", "GMT0"),
|
||||
("Greenwich", "Greenwich"),
|
||||
("HST", "HST"),
|
||||
("Hongkong", "Hongkong"),
|
||||
("Iceland", "Iceland"),
|
||||
("Indian/Antananarivo", "Indian/Antananarivo"),
|
||||
("Indian/Chagos", "Indian/Chagos"),
|
||||
("Indian/Christmas", "Indian/Christmas"),
|
||||
("Indian/Cocos", "Indian/Cocos"),
|
||||
("Indian/Comoro", "Indian/Comoro"),
|
||||
("Indian/Kerguelen", "Indian/Kerguelen"),
|
||||
("Indian/Mahe", "Indian/Mahe"),
|
||||
("Indian/Maldives", "Indian/Maldives"),
|
||||
("Indian/Mauritius", "Indian/Mauritius"),
|
||||
("Indian/Mayotte", "Indian/Mayotte"),
|
||||
("Indian/Reunion", "Indian/Reunion"),
|
||||
("Iran", "Iran"),
|
||||
("Israel", "Israel"),
|
||||
("Jamaica", "Jamaica"),
|
||||
("Japan", "Japan"),
|
||||
("Kwajalein", "Kwajalein"),
|
||||
("Libya", "Libya"),
|
||||
("MET", "MET"),
|
||||
("MST", "MST"),
|
||||
("MST7MDT", "MST7MDT"),
|
||||
("Mexico/BajaNorte", "Mexico/BajaNorte"),
|
||||
("Mexico/BajaSur", "Mexico/BajaSur"),
|
||||
("Mexico/General", "Mexico/General"),
|
||||
("NZ", "NZ"),
|
||||
("NZ-CHAT", "NZ-CHAT"),
|
||||
("Navajo", "Navajo"),
|
||||
("PRC", "PRC"),
|
||||
("PST8PDT", "PST8PDT"),
|
||||
("Pacific/Apia", "Pacific/Apia"),
|
||||
("Pacific/Auckland", "Pacific/Auckland"),
|
||||
("Pacific/Bougainville", "Pacific/Bougainville"),
|
||||
("Pacific/Chatham", "Pacific/Chatham"),
|
||||
("Pacific/Chuuk", "Pacific/Chuuk"),
|
||||
("Pacific/Easter", "Pacific/Easter"),
|
||||
("Pacific/Efate", "Pacific/Efate"),
|
||||
("Pacific/Enderbury", "Pacific/Enderbury"),
|
||||
("Pacific/Fakaofo", "Pacific/Fakaofo"),
|
||||
("Pacific/Fiji", "Pacific/Fiji"),
|
||||
("Pacific/Funafuti", "Pacific/Funafuti"),
|
||||
("Pacific/Galapagos", "Pacific/Galapagos"),
|
||||
("Pacific/Gambier", "Pacific/Gambier"),
|
||||
("Pacific/Guadalcanal", "Pacific/Guadalcanal"),
|
||||
("Pacific/Guam", "Pacific/Guam"),
|
||||
("Pacific/Honolulu", "Pacific/Honolulu"),
|
||||
("Pacific/Johnston", "Pacific/Johnston"),
|
||||
("Pacific/Kanton", "Pacific/Kanton"),
|
||||
("Pacific/Kiritimati", "Pacific/Kiritimati"),
|
||||
("Pacific/Kosrae", "Pacific/Kosrae"),
|
||||
("Pacific/Kwajalein", "Pacific/Kwajalein"),
|
||||
("Pacific/Majuro", "Pacific/Majuro"),
|
||||
("Pacific/Marquesas", "Pacific/Marquesas"),
|
||||
("Pacific/Midway", "Pacific/Midway"),
|
||||
("Pacific/Nauru", "Pacific/Nauru"),
|
||||
("Pacific/Niue", "Pacific/Niue"),
|
||||
("Pacific/Norfolk", "Pacific/Norfolk"),
|
||||
("Pacific/Noumea", "Pacific/Noumea"),
|
||||
("Pacific/Pago_Pago", "Pacific/Pago_Pago"),
|
||||
("Pacific/Palau", "Pacific/Palau"),
|
||||
("Pacific/Pitcairn", "Pacific/Pitcairn"),
|
||||
("Pacific/Pohnpei", "Pacific/Pohnpei"),
|
||||
("Pacific/Ponape", "Pacific/Ponape"),
|
||||
("Pacific/Port_Moresby", "Pacific/Port_Moresby"),
|
||||
("Pacific/Rarotonga", "Pacific/Rarotonga"),
|
||||
("Pacific/Saipan", "Pacific/Saipan"),
|
||||
("Pacific/Samoa", "Pacific/Samoa"),
|
||||
("Pacific/Tahiti", "Pacific/Tahiti"),
|
||||
("Pacific/Tarawa", "Pacific/Tarawa"),
|
||||
("Pacific/Tongatapu", "Pacific/Tongatapu"),
|
||||
("Pacific/Truk", "Pacific/Truk"),
|
||||
("Pacific/Wake", "Pacific/Wake"),
|
||||
("Pacific/Wallis", "Pacific/Wallis"),
|
||||
("Pacific/Yap", "Pacific/Yap"),
|
||||
("Poland", "Poland"),
|
||||
("Portugal", "Portugal"),
|
||||
("ROC", "ROC"),
|
||||
("ROK", "ROK"),
|
||||
("Singapore", "Singapore"),
|
||||
("Turkey", "Turkey"),
|
||||
("UCT", "UCT"),
|
||||
("US/Alaska", "US/Alaska"),
|
||||
("US/Aleutian", "US/Aleutian"),
|
||||
("US/Arizona", "US/Arizona"),
|
||||
("US/Central", "US/Central"),
|
||||
("US/East-Indiana", "US/East-Indiana"),
|
||||
("US/Eastern", "US/Eastern"),
|
||||
("US/Hawaii", "US/Hawaii"),
|
||||
("US/Indiana-Starke", "US/Indiana-Starke"),
|
||||
("US/Michigan", "US/Michigan"),
|
||||
("US/Mountain", "US/Mountain"),
|
||||
("US/Pacific", "US/Pacific"),
|
||||
("US/Samoa", "US/Samoa"),
|
||||
("UTC", "UTC"),
|
||||
("Universal", "Universal"),
|
||||
("W-SU", "W-SU"),
|
||||
("WET", "WET"),
|
||||
("Zulu", "Zulu"),
|
||||
("localtime", "localtime"),
|
||||
],
|
||||
default="America/Los_Angeles",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,17 @@
|
||||
# Generated by Django 4.2.9 on 2024-01-26 00:31
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("core", "0038_alter_coresettings_default_time_zone"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="coresettings",
|
||||
name="smtp_from_name",
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.9 on 2024-01-28 02:50
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0039_coresettings_smtp_from_name"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="customfield",
|
||||
name="hide_in_summary",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
18
api/tacticalrmm/core/migrations/0041_auto_20240128_0301.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.9 on 2024-01-28 03:01
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def update_hide_in_summary(apps, schema_editor):
|
||||
CustomField = apps.get_model("core", "CustomField")
|
||||
for field in CustomField.objects.filter(hide_in_ui=True):
|
||||
field.hide_in_summary = True
|
||||
field.save(update_fields=["hide_in_summary"])
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("core", "0040_customfield_hide_in_summary"),
|
||||
]
|
||||
|
||||
operations = [migrations.RunPython(update_hide_in_summary)]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.10 on 2024-02-20 02:51
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0041_auto_20240128_0301"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="coresettings",
|
||||
name="mesh_company_name",
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.10 on 2024-02-23 19:01
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0042_coresettings_mesh_company_name"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="coresettings",
|
||||
name="sync_mesh_with_trmm",
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,17 @@
|
||||
# Generated by Django 4.2.11 on 2024-03-12 05:23
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0043_coresettings_sync_mesh_with_trmm"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="coresettings",
|
||||
name="mesh_disable_auto_login",
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,65 @@
|
||||
# Generated by Django 4.2.13 on 2024-06-28 20:21
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0044_remove_coresettings_mesh_disable_auto_login"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="coresettings",
|
||||
name="enable_server_scripts",
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="coresettings",
|
||||
name="enable_server_webterminal",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="urlaction",
|
||||
name="action_type",
|
||||
field=models.CharField(
|
||||
choices=[("web", "Web"), ("rest", "Rest")], default="web", max_length=10
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="urlaction",
|
||||
name="rest_body",
|
||||
field=models.TextField(blank=True, default="", null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="urlaction",
|
||||
name="rest_headers",
|
||||
field=models.TextField(blank=True, default="", null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="urlaction",
|
||||
name="rest_method",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("get", "Get"),
|
||||
("post", "Post"),
|
||||
("put", "Put"),
|
||||
("delete", "Delete"),
|
||||
("patch", "Patch"),
|
||||
],
|
||||
default="post",
|
||||
max_length=10,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="urlaction",
|
||||
name="desc",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="urlaction",
|
||||
name="name",
|
||||
field=models.CharField(max_length=255),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.13 on 2024-07-05 19:17
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0045_coresettings_enable_server_scripts_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="coresettings",
|
||||
name="notify_on_info_alerts",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="coresettings",
|
||||
name="notify_on_warning_alerts",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.13 on 2024-07-05 19:30
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0046_coresettings_notify_on_info_alerts_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="coresettings",
|
||||
name="notify_on_warning_alerts",
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.16 on 2024-11-04 23:37
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0047_alter_coresettings_notify_on_warning_alerts"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="coresettings",
|
||||
name="block_local_user_logon",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="coresettings",
|
||||
name="sso_enabled",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,9 +1,9 @@
|
||||
import smtplib
|
||||
from contextlib import suppress
|
||||
from email.headerregistry import Address
|
||||
from email.message import EmailMessage
|
||||
from typing import TYPE_CHECKING, List, Optional, cast
|
||||
|
||||
import pytz
|
||||
import requests
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
@@ -15,16 +15,19 @@ from twilio.rest import Client as TwClient
|
||||
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
from tacticalrmm.constants import (
|
||||
ALL_TIMEZONES,
|
||||
CORESETTINGS_CACHE_KEY,
|
||||
CustomFieldModel,
|
||||
CustomFieldType,
|
||||
DebugLogLevel,
|
||||
URLActionRestMethod,
|
||||
URLActionType,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from alerts.models import AlertTemplate
|
||||
|
||||
TZ_CHOICES = [(_, _) for _ in pytz.all_timezones]
|
||||
TZ_CHOICES = [(_, _) for _ in ALL_TIMEZONES]
|
||||
|
||||
|
||||
class CoreSettings(BaseAuditModel):
|
||||
@@ -44,6 +47,7 @@ class CoreSettings(BaseAuditModel):
|
||||
smtp_from_email = models.CharField(
|
||||
max_length=255, blank=True, default="from@example.com"
|
||||
)
|
||||
smtp_from_name = models.CharField(max_length=255, null=True, blank=True)
|
||||
smtp_host = models.CharField(max_length=255, blank=True, default="smtp.gmail.com")
|
||||
smtp_host_user = models.CharField(
|
||||
max_length=255, blank=True, default="admin@example.com"
|
||||
@@ -72,7 +76,8 @@ class CoreSettings(BaseAuditModel):
|
||||
mesh_device_group = models.CharField(
|
||||
max_length=255, null=True, blank=True, default="TacticalRMM"
|
||||
)
|
||||
mesh_disable_auto_login = models.BooleanField(default=False)
|
||||
mesh_company_name = models.CharField(max_length=255, null=True, blank=True)
|
||||
sync_mesh_with_trmm = models.BooleanField(default=True)
|
||||
agent_auto_update = models.BooleanField(default=True)
|
||||
workstation_policy = models.ForeignKey(
|
||||
"automation.Policy",
|
||||
@@ -102,6 +107,13 @@ class CoreSettings(BaseAuditModel):
|
||||
open_ai_model = models.CharField(
|
||||
max_length=255, blank=True, default="gpt-3.5-turbo"
|
||||
)
|
||||
enable_server_scripts = models.BooleanField(default=True)
|
||||
enable_server_webterminal = models.BooleanField(default=False)
|
||||
notify_on_info_alerts = models.BooleanField(default=False)
|
||||
notify_on_warning_alerts = models.BooleanField(default=True)
|
||||
|
||||
block_local_user_logon = models.BooleanField(default=False)
|
||||
sso_enabled = models.BooleanField(default=False)
|
||||
|
||||
def save(self, *args, **kwargs) -> None:
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
@@ -119,9 +131,23 @@ class CoreSettings(BaseAuditModel):
|
||||
self.mesh_token = settings.MESH_TOKEN_KEY
|
||||
|
||||
old_settings = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
|
||||
if old_settings:
|
||||
# fail safe to not lock out user logons
|
||||
if not self.sso_enabled and self.block_local_user_logon:
|
||||
self.block_local_user_logon = False
|
||||
|
||||
if old_settings.sso_enabled != self.sso_enabled and self.sso_enabled:
|
||||
from core.utils import token_is_valid
|
||||
|
||||
_, valid = token_is_valid()
|
||||
if not valid:
|
||||
raise ValidationError("")
|
||||
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
if old_settings:
|
||||
|
||||
if (
|
||||
old_settings.alert_template != self.alert_template
|
||||
or old_settings.server_policy != self.server_policy
|
||||
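A hedged illustration of the fail-safe added in the save() override above: if SSO is disabled while local logons are blocked, the flag is reset so administrators cannot lock themselves out. Roughly, in a hypothetical Django shell session (behaviour inferred from the code above, not shown in the diff):

core = CoreSettings.objects.first()
core.sso_enabled = False
core.block_local_user_logon = True
core.save()
core.refresh_from_db()
# expected: False, because the override clears the flag whenever SSO is off
print(core.block_local_user_logon)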
@@ -144,6 +170,11 @@ class CoreSettings(BaseAuditModel):
|
||||
def __str__(self) -> str:
|
||||
return "Global Site Settings"
|
||||
|
||||
@property
|
||||
def mesh_api_superuser(self) -> str:
|
||||
# must be lowercase otherwise mesh api breaks
|
||||
return self.mesh_username.lower()
|
||||
|
||||
@property
|
||||
def sms_is_configured(self) -> bool:
|
||||
return all(
|
||||
@@ -177,6 +208,28 @@ class CoreSettings(BaseAuditModel):
|
||||
|
||||
return False
|
||||
|
||||
@property
|
||||
def server_scripts_enabled(self) -> bool:
|
||||
if (
|
||||
getattr(settings, "HOSTED", False)
|
||||
or getattr(settings, "TRMM_DISABLE_SERVER_SCRIPTS", False)
|
||||
or getattr(settings, "DEMO", False)
|
||||
):
|
||||
return False
|
||||
|
||||
return self.enable_server_scripts
|
||||
|
||||
@property
|
||||
def web_terminal_enabled(self) -> bool:
|
||||
if (
|
||||
getattr(settings, "HOSTED", False)
|
||||
or getattr(settings, "TRMM_DISABLE_WEB_TERMINAL", False)
|
||||
or getattr(settings, "DEMO", False)
|
||||
):
|
||||
return False
|
||||
|
||||
return self.enable_server_webterminal
|
||||
|
||||
def send_mail(
|
||||
self,
|
||||
subject: str,
|
||||
@@ -207,7 +260,14 @@
        try:
            msg = EmailMessage()
            msg["Subject"] = subject
            msg["From"] = from_address

            if self.smtp_from_name:
                msg["From"] = Address(
                    display_name=self.smtp_from_name, addr_spec=from_address
                )
            else:
                msg["From"] = from_address

            msg["To"] = email_recipients
            msg.set_content(body)

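The smtp_from_name support above relies on email.headerregistry.Address from the standard library to render a display name alongside the address. A small standalone illustration (not taken from the diff; names are placeholders):

from email.headerregistry import Address
from email.message import EmailMessage

msg = EmailMessage()
msg["Subject"] = "Test"
msg["From"] = Address(display_name="Tactical RMM Alerts", addr_spec="from@example.com")
msg["To"] = "admin@example.com"
msg.set_content("hello")
print(msg["From"])  # Tactical RMM Alerts <from@example.com>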
@@ -222,9 +282,16 @@ class CoreSettings(BaseAuditModel):
|
||||
server.send_message(msg)
|
||||
server.quit()
|
||||
else:
|
||||
# smtp relay. no auth required
|
||||
server.send_message(msg)
|
||||
server.quit()
|
||||
# gmail smtp relay specific handling.
|
||||
if self.smtp_host == "smtp-relay.gmail.com":
|
||||
server.ehlo()
|
||||
server.starttls()
|
||||
server.send_message(msg)
|
||||
server.quit()
|
||||
else:
|
||||
# smtp relay. no auth required
|
||||
server.send_message(msg)
|
||||
server.quit()
|
||||
|
||||
except Exception as e:
|
||||
DebugLog.error(message=f"Sending email failed with error: {e}")
|
||||
@@ -298,6 +365,7 @@ class CustomField(BaseAuditModel):
|
||||
default=list,
|
||||
)
|
||||
hide_in_ui = models.BooleanField(default=False)
|
||||
hide_in_summary = models.BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
unique_together = (("model", "name"),)
|
||||
@@ -348,7 +416,7 @@ class CodeSignToken(models.Model):
|
||||
if not self.pk and CodeSignToken.objects.exists():
|
||||
raise ValidationError("There can only be one CodeSignToken instance")
|
||||
|
||||
super(CodeSignToken, self).save(*args, **kwargs)
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def is_valid(self) -> bool:
|
||||
@@ -403,9 +471,19 @@ class GlobalKVStore(BaseAuditModel):


class URLAction(BaseAuditModel):
    name = models.CharField(max_length=25)
    desc = models.CharField(max_length=100, null=True, blank=True)
    name = models.CharField(max_length=255)
    desc = models.TextField(null=True, blank=True)
    pattern = models.TextField()
    action_type = models.CharField(
        max_length=10, choices=URLActionType.choices, default=URLActionType.WEB
    )
    rest_method = models.CharField(
        max_length=10,
        choices=URLActionRestMethod.choices,
        default=URLActionRestMethod.POST,
    )
    rest_body = models.TextField(null=True, blank=True, default="")
    rest_headers = models.TextField(null=True, blank=True, default="")

    def __str__(self):
        return self.name
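For illustration, a hedged sketch of creating a REST-type URL action with the new fields above. The field names come from the model; the choice values "rest" and "post" match the migration earlier in this diff; the URL, headers, and body are placeholders:

from core.models import URLAction

action = URLAction.objects.create(
    name="Notify webhook",
    desc="POST a JSON payload to an external endpoint",
    pattern="https://example.com/hooks/trmm",
    action_type="rest",
    rest_method="post",
    rest_headers='{"Content-Type": "application/json"}',
    rest_body='{"message": "hello from trmm"}',
)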
@@ -415,47 +493,3 @@ class URLAction(BaseAuditModel):
|
||||
from .serializers import URLActionSerializer
|
||||
|
||||
return URLActionSerializer(action).data
|
||||
|
||||
|
||||
RUN_ON_CHOICES = (
|
||||
("client", "Client"),
|
||||
("site", "Site"),
|
||||
("agent", "Agent"),
|
||||
("once", "Once"),
|
||||
)
|
||||
|
||||
SCHEDULE_CHOICES = (("daily", "Daily"), ("weekly", "Weekly"), ("monthly", "Monthly"))
|
||||
|
||||
|
||||
""" class GlobalTask(models.Model):
|
||||
script = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="script",
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
script_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
custom_field = models.OneToOneField(
|
||||
"core.CustomField",
|
||||
related_name="globaltask",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
timeout = models.PositiveIntegerField(default=120)
|
||||
retcode = models.IntegerField(null=True, blank=True)
|
||||
stdout = models.TextField(null=True, blank=True)
|
||||
stderr = models.TextField(null=True, blank=True)
|
||||
execution_time = models.CharField(max_length=100, default="0.0000")
|
||||
run_schedule = models.CharField(
|
||||
max_length=25, choices=SCHEDULE_CHOICES, default="once"
|
||||
)
|
||||
run_on = models.CharField(
|
||||
max_length=25, choices=RUN_ON_CHOICES, default="once"
|
||||
) """
|
||||
|
||||
@@ -11,9 +11,23 @@ class CoreSettingsPerms(permissions.BasePermission):
|
||||
return _has_perm(r, "can_edit_core_settings")
|
||||
|
||||
|
||||
class GlobalKeyStorePerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
if r.method == "GET":
|
||||
return _has_perm(r, "can_view_global_keystore")
|
||||
|
||||
return _has_perm(r, "can_edit_global_keystore")
|
||||
|
||||
|
||||
class URLActionPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
return _has_perm(r, "can_run_urlactions")
|
||||
if r.method in {"GET", "PATCH"}:
|
||||
return _has_perm(r, "can_run_urlactions")
|
||||
elif r.path == "/core/urlaction/run/test/" and r.method == "POST":
|
||||
return _has_perm(r, "can_run_urlactions")
|
||||
|
||||
# TODO make a manage url action perm instead?
|
||||
return _has_perm(r, "can_edit_core_settings")
|
||||
|
||||
|
||||
class ServerMaintPerms(permissions.BasePermission):
|
||||
@@ -30,5 +44,17 @@ class CustomFieldPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
if r.method == "GET":
|
||||
return _has_perm(r, "can_view_customfields")
|
||||
elif r.method == "PATCH" and view.__class__.__name__ == "GetAddCustomFields":
|
||||
return _has_perm(r, "can_view_customfields")
|
||||
|
||||
return _has_perm(r, "can_manage_customfields")
|
||||
|
||||
|
||||
class RunServerScriptPerms(permissions.BasePermission):
    def has_permission(self, r, view) -> bool:
        return _has_perm(r, "can_run_server_scripts")


class WebTerminalPerms(permissions.BasePermission):
    def has_permission(self, r, view) -> bool:
        return _has_perm(r, "can_use_webterm")

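The new permission classes above follow the usual Django REST Framework pattern. As a hedged illustration (the view and import path below are assumptions, not part of the diff), a permission class is attached to a view via permission_classes:

from rest_framework.response import Response
from rest_framework.views import APIView

from core.permissions import WebTerminalPerms  # module path assumed from this diff


class WebTerminalStatus(APIView):  # hypothetical example view
    permission_classes = [WebTerminalPerms]

    def get(self, request):
        # only users passing can_use_webterm reach this handler
        return Response({"web_terminal": "ok"})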
@@ -1,14 +1,30 @@
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from rest_framework import serializers
|
||||
|
||||
from tacticalrmm.constants import ALL_TIMEZONES
|
||||
|
||||
from .models import CodeSignToken, CoreSettings, CustomField, GlobalKVStore, URLAction
|
||||
|
||||
|
||||
class CoreSettingsSerializer(serializers.ModelSerializer):
|
||||
class HostedCoreMixin:
|
||||
def to_representation(self, instance):
|
||||
ret = super().to_representation(instance) # type: ignore
|
||||
if getattr(settings, "HOSTED", False):
|
||||
for field in ("mesh_site", "mesh_token", "mesh_username"):
|
||||
ret[field] = "n/a"
|
||||
|
||||
ret["sync_mesh_with_trmm"] = True
|
||||
ret["enable_server_scripts"] = False
|
||||
ret["enable_server_webterminal"] = False
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
class CoreSettingsSerializer(HostedCoreMixin, serializers.ModelSerializer):
|
||||
all_timezones = serializers.SerializerMethodField("all_time_zones")
|
||||
|
||||
def all_time_zones(self, obj):
|
||||
return pytz.all_timezones
|
||||
return ALL_TIMEZONES
|
||||
|
||||
class Meta:
|
||||
model = CoreSettings
|
||||
@@ -16,7 +32,7 @@ class CoreSettingsSerializer(serializers.ModelSerializer):
|
||||
|
||||
|
||||
# for auditing
|
||||
class CoreSerializer(serializers.ModelSerializer):
|
||||
class CoreSerializer(HostedCoreMixin, serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = CoreSettings
|
||||
fields = "__all__"
|
||||
|
||||
@@ -1,20 +1,36 @@
|
||||
import time
|
||||
import asyncio
|
||||
import traceback
|
||||
from contextlib import suppress
|
||||
from time import sleep
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import nats
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.db.models import Prefetch
|
||||
from django.db.utils import DatabaseError
|
||||
from django.utils import timezone as djangotime
|
||||
from packaging import version as pyver
|
||||
|
||||
from accounts.models import User
|
||||
from accounts.utils import is_superuser
|
||||
from agents.models import Agent
|
||||
from agents.tasks import clear_faults_task, prune_agent_history
|
||||
from alerts.models import Alert
|
||||
from alerts.tasks import prune_resolved_alerts
|
||||
from autotasks.models import AutomatedTask, TaskResult
|
||||
from checks.models import Check, CheckResult
|
||||
from checks.models import Check, CheckHistory, CheckResult
|
||||
from checks.tasks import prune_check_history
|
||||
from clients.models import Client, Site
|
||||
from core.utils import get_core_settings
|
||||
from core.mesh_utils import (
|
||||
MeshSync,
|
||||
build_mesh_display_name,
|
||||
has_mesh_perms,
|
||||
transform_mesh,
|
||||
transform_trmm,
|
||||
)
|
||||
from core.models import CoreSettings
|
||||
from core.utils import get_core_settings, get_mesh_ws_url, make_alpha_numeric
|
||||
from logs.models import PendingAction
|
||||
from logs.tasks import prune_audit_log, prune_debug_log
|
||||
from tacticalrmm.celery import app
|
||||
@@ -23,6 +39,7 @@ from tacticalrmm.constants import (
|
||||
AGENT_STATUS_ONLINE,
|
||||
AGENT_STATUS_OVERDUE,
|
||||
RESOLVE_ALERTS_LOCK,
|
||||
SYNC_MESH_PERMS_TASK_LOCK,
|
||||
SYNC_SCHED_TASK_LOCK,
|
||||
AlertSeverity,
|
||||
AlertType,
|
||||
@@ -30,12 +47,36 @@ from tacticalrmm.constants import (
|
||||
PAStatus,
|
||||
TaskStatus,
|
||||
TaskSyncStatus,
|
||||
TaskType,
|
||||
)
|
||||
from tacticalrmm.helpers import rand_range
|
||||
from tacticalrmm.utils import DjangoConnectionThreadPoolExecutor, redis_lock
|
||||
from tacticalrmm.helpers import make_random_password, setup_nats_options
|
||||
from tacticalrmm.logger import logger
|
||||
from tacticalrmm.nats_utils import a_nats_cmd
|
||||
from tacticalrmm.permissions import _has_perm_on_agent
|
||||
from tacticalrmm.utils import redis_lock
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from django.db.models import QuerySet
|
||||
from nats.aio.client import Client as NATSClient
|
||||
|
||||
|
||||
def remove_orphaned_history_results() -> int:
|
||||
try:
|
||||
with transaction.atomic():
|
||||
check_hist_agentids = CheckHistory.objects.values_list(
|
||||
"agent_id", flat=True
|
||||
).distinct()
|
||||
current_agentids = set(Agent.objects.values_list("agent_id", flat=True))
|
||||
orphaned_agentids = [
|
||||
i for i in check_hist_agentids if i not in current_agentids
|
||||
]
|
||||
count, _ = CheckHistory.objects.filter(
|
||||
agent_id__in=orphaned_agentids
|
||||
).delete()
|
||||
return count
|
||||
except Exception as e:
|
||||
logger.error(str(e))
|
||||
return 0
|
||||
|
||||
|
||||
@app.task
|
||||
@@ -44,6 +85,8 @@ def core_maintenance_tasks() -> None:
|
||||
remove_if_not_scheduled=True, expire_date__lt=djangotime.now()
|
||||
).delete()
|
||||
|
||||
remove_orphaned_history_results()
|
||||
|
||||
core = get_core_settings()
|
||||
|
||||
# remove old CheckHistory data
|
||||
@@ -148,50 +191,150 @@ def sync_scheduled_tasks(self) -> str:
|
||||
if not acquired:
|
||||
return f"{self.app.oid} still running"
|
||||
|
||||
task_actions = [] # list of tuples
|
||||
actions: list[tuple[str, int, Agent, Any, str, str]] = [] # list of tuples
|
||||
|
||||
for agent in _get_agent_qs():
|
||||
if (
|
||||
pyver.parse(agent.version) >= pyver.parse("1.6.0")
|
||||
not agent.is_posix
|
||||
and pyver.parse(agent.version) >= pyver.parse("1.6.0")
|
||||
and agent.status == AGENT_STATUS_ONLINE
|
||||
):
|
||||
# create a list of tasks to be synced so we can run them in parallel later with thread pool executor
|
||||
# create a list of tasks to be synced so we can run them asynchronously
|
||||
for task in agent.get_tasks_with_policies():
|
||||
agent_obj = agent if task.policy else None
|
||||
# TODO can we just use agent??
|
||||
agent_obj: "Agent" = agent if task.policy else task.agent
|
||||
|
||||
# onboarding tasks require agent >= 2.6.0
|
||||
if task.task_type == TaskType.ONBOARDING and pyver.parse(
|
||||
agent.version
|
||||
) < pyver.parse("2.6.0"):
|
||||
continue
|
||||
|
||||
# policy tasks will be an empty dict on initial
|
||||
if (not task.task_result) or (
|
||||
isinstance(task.task_result, TaskResult)
|
||||
and task.task_result.sync_status == TaskSyncStatus.INITIAL
|
||||
):
|
||||
task_actions.append(("create", task.id, agent_obj))
|
||||
actions.append(
|
||||
(
|
||||
"create",
|
||||
task.id,
|
||||
agent_obj,
|
||||
task.generate_nats_task_payload(),
|
||||
agent.agent_id,
|
||||
agent.hostname,
|
||||
)
|
||||
)
|
||||
elif (
|
||||
isinstance(task.task_result, TaskResult)
|
||||
and task.task_result.sync_status
|
||||
== TaskSyncStatus.PENDING_DELETION
|
||||
):
|
||||
task_actions.append(("delete", task.id, agent_obj))
|
||||
actions.append(
|
||||
(
|
||||
"delete",
|
||||
task.id,
|
||||
agent_obj,
|
||||
{},
|
||||
agent.agent_id,
|
||||
agent.hostname,
|
||||
)
|
||||
)
|
||||
elif (
|
||||
isinstance(task.task_result, TaskResult)
|
||||
and task.task_result.sync_status == TaskSyncStatus.NOT_SYNCED
|
||||
):
|
||||
task_actions.append(("modify", task.id, agent_obj))
|
||||
actions.append(
|
||||
(
|
||||
"modify",
|
||||
task.id,
|
||||
agent_obj,
|
||||
task.generate_nats_task_payload(),
|
||||
agent.agent_id,
|
||||
agent.hostname,
|
||||
)
|
||||
)
|
||||
|
||||
def _handle_task(actions: tuple[str, int, Any]) -> None:
|
||||
time.sleep(rand_range(50, 600))
|
||||
task: "AutomatedTask" = AutomatedTask.objects.get(id=actions[1])
|
||||
if actions[0] == "create":
|
||||
task.create_task_on_agent(agent=actions[2])
|
||||
elif actions[0] == "modify":
|
||||
task.modify_task_on_agent(agent=actions[2])
|
||||
elif actions[0] == "delete":
|
||||
task.delete_task_on_agent(agent=actions[2])
|
||||
async def _handle_task_on_agent(
|
||||
nc: "NATSClient", actions: tuple[str, int, Agent, Any, str, str]
|
||||
) -> None:
|
||||
# tuple: (0: action, 1: task.id, 2: agent object, 3: nats task payload, 4: agent_id, 5: agent hostname)
|
||||
action = actions[0]
|
||||
task_id = actions[1]
|
||||
agent = actions[2]
|
||||
payload = actions[3]
|
||||
agent_id = actions[4]
|
||||
hostname = actions[5]
|
||||
|
||||
# TODO this is a janky hack
|
||||
# Rework this with asyncio. Need to rewrite all sync db operations with django's new async api
|
||||
with DjangoConnectionThreadPoolExecutor(max_workers=50) as executor:
|
||||
executor.map(_handle_task, task_actions)
|
||||
task: "AutomatedTask" = await AutomatedTask.objects.aget(id=task_id)
|
||||
try:
|
||||
task_result = await TaskResult.objects.aget(agent=agent, task=task)
|
||||
except TaskResult.DoesNotExist:
|
||||
task_result = await TaskResult.objects.acreate(agent=agent, task=task)
|
||||
|
||||
return "completed"
|
||||
if action in ("create", "modify"):
|
||||
logger.debug(payload)
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": payload,
|
||||
}
|
||||
|
||||
r = await a_nats_cmd(nc=nc, sub=agent_id, data=nats_data, timeout=10)
|
||||
if r != "ok":
|
||||
if action == "create":
|
||||
task_result.sync_status = TaskSyncStatus.INITIAL
|
||||
else:
|
||||
task_result.sync_status = TaskSyncStatus.NOT_SYNCED
|
||||
|
||||
logger.error(
|
||||
f"Unable to {action} scheduled task {task.name} on {hostname}: {r}"
|
||||
)
|
||||
else:
|
||||
task_result.sync_status = TaskSyncStatus.SYNCED
|
||||
logger.info(
|
||||
f"{hostname} task {task.name} was {'created' if action == 'create' else 'modified'}"
|
||||
)
|
||||
|
||||
await task_result.asave(update_fields=["sync_status"])
|
||||
# delete
|
||||
else:
|
||||
nats_data = {
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": task.win_task_name},
|
||||
}
|
||||
r = await a_nats_cmd(nc=nc, sub=agent_id, data=nats_data, timeout=10)
|
||||
|
||||
if r != "ok" and "The system cannot find the file specified" not in r:
|
||||
task_result.sync_status = TaskSyncStatus.PENDING_DELETION
|
||||
|
||||
with suppress(DatabaseError):
|
||||
await task_result.asave(update_fields=["sync_status"])
|
||||
|
||||
logger.error(
|
||||
f"Unable to {action} scheduled task {task.name} on {hostname}: {r}"
|
||||
)
|
||||
else:
|
||||
task_name = task.name
|
||||
await task.adelete()
|
||||
logger.info(f"{hostname} task {task_name} was deleted.")
|
||||
|
||||
async def _run():
|
||||
opts = setup_nats_options()
|
||||
try:
|
||||
nc = await nats.connect(**opts)
|
||||
except Exception as e:
|
||||
ret = str(e)
|
||||
logger.error(ret)
|
||||
return ret
|
||||
|
||||
if tasks := [_handle_task_on_agent(nc, task) for task in actions]:
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
await nc.flush()
|
||||
await nc.close()
|
||||
|
||||
asyncio.run(_run())
|
||||
return "ok"
|
||||
|
||||
|
||||
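Editorial note: the rewritten task above replaces the thread-pool "janky hack" with a single NATS connection shared by asyncio coroutines. A stripped-down sketch of that shape using the same lock and NATS helpers; `example_sync_task`, `_do_one` and the placeholder action tuple are illustrative, not the real payload handling:

```python
import asyncio

import nats

from tacticalrmm.celery import app
from tacticalrmm.constants import SYNC_SCHED_TASK_LOCK
from tacticalrmm.helpers import setup_nats_options
from tacticalrmm.utils import redis_lock


@app.task(bind=True)
def example_sync_task(self) -> str:
    # the redis lock prevents overlapping runs of the same periodic task
    with redis_lock(SYNC_SCHED_TASK_LOCK, self.app.oid) as acquired:
        if not acquired:
            return f"{self.app.oid} still running"

        # in the real task this list is built from the ORM (see above)
        actions = [("create", 1, None, {}, "agent-id", "hostname")]

        async def _do_one(nc, action) -> None:
            ...  # send a_nats_cmd() and update TaskResult.sync_status

        async def _run() -> None:
            nc = await nats.connect(**setup_nats_options())
            try:
                # one connection, many concurrent per-agent handlers
                await asyncio.gather(*(_do_one(nc, a) for a in actions))
                await nc.flush()
            finally:
                await nc.close()

        asyncio.run(_run())
        return "ok"
```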
def _get_failing_data(agents: "QuerySet[Agent]") -> dict[str, bool]:
|
||||
@@ -252,3 +395,172 @@ def cache_db_fields_task() -> None:
|
||||
agents = qs.filter(site__client=client)
|
||||
client.failing_checks = _get_failing_data(agents)
|
||||
client.save(update_fields=["failing_checks"])
|
||||
|
||||
|
||||
@app.task(bind=True)
|
||||
def sync_mesh_perms_task(self):
|
||||
with redis_lock(SYNC_MESH_PERMS_TASK_LOCK, self.app.oid) as acquired:
|
||||
if not acquired:
|
||||
return f"{self.app.oid} still running"
|
||||
|
||||
try:
|
||||
core = CoreSettings.objects.first()
|
||||
do_not_sync = not core.sync_mesh_with_trmm
|
||||
uri = get_mesh_ws_url()
|
||||
ms = MeshSync(uri)
|
||||
|
||||
if do_not_sync:
|
||||
for user in ms.mesh_users:
|
||||
ms.delete_user_from_mesh(mesh_user_id=user)
|
||||
|
||||
return
|
||||
|
||||
company_name = core.mesh_company_name
|
||||
mnp = {"action": "nodes"}
|
||||
mesh_nodes_raw = ms.mesh_action(payload=mnp, wait=True)["nodes"]
|
||||
|
||||
users = User.objects.select_related("role").filter(
|
||||
agent=None,
|
||||
is_installer_user=False,
|
||||
is_active=True,
|
||||
block_dashboard_login=False,
|
||||
)
|
||||
|
||||
trmm_agents_meshnodeids = [
|
||||
f"node//{i.hex_mesh_node_id}"
|
||||
for i in Agent.objects.only("mesh_node_id")
|
||||
if i.mesh_node_id
|
||||
]
|
||||
|
||||
mesh_users_dict = {}
|
||||
for user in users:
|
||||
full_name = build_mesh_display_name(
|
||||
first_name=user.first_name,
|
||||
last_name=user.last_name,
|
||||
company_name=company_name,
|
||||
)
|
||||
|
||||
# mesh user creation will fail if same email exists for another user
|
||||
# make sure that doesn't happen by making a random email
|
||||
rand_str1 = make_random_password(len=6)
|
||||
rand_str2 = make_random_password(len=5)
|
||||
# for trmm users whose usernames are emails
|
||||
email_prefix = make_alpha_numeric(user.username)
|
||||
email = f"{email_prefix}.{rand_str1}@tacticalrmm-do-not-change-{rand_str2}.local"
|
||||
mesh_users_dict[user.mesh_user_id] = {
|
||||
"_id": user.mesh_user_id,
|
||||
"username": user.mesh_username,
|
||||
"full_name": full_name,
|
||||
"email": email,
|
||||
}
|
||||
|
||||
new_trmm_agents = []
|
||||
for agent in Agent.objects.defer(*AGENT_DEFER):
|
||||
if not agent.mesh_node_id:
|
||||
continue
|
||||
agent_dict = {
|
||||
"node_id": f"node//{agent.hex_mesh_node_id}",
|
||||
"hostname": agent.hostname,
|
||||
}
|
||||
tmp: list[dict[str, str]] = []
|
||||
for user in users:
|
||||
if not has_mesh_perms(user=user):
|
||||
logger.debug(f"No mesh perms for {user} on {agent.hostname}")
|
||||
continue
|
||||
|
||||
if (user.is_superuser or is_superuser(user)) or _has_perm_on_agent(
|
||||
user, agent.agent_id
|
||||
):
|
||||
tmp.append({"_id": user.mesh_user_id})
|
||||
|
||||
agent_dict["links"] = tmp
|
||||
new_trmm_agents.append(agent_dict)
|
||||
|
||||
final_trmm = transform_trmm(new_trmm_agents)
|
||||
final_mesh = transform_mesh(mesh_nodes_raw)
|
||||
|
||||
# delete users first
|
||||
source_users_global = set()
|
||||
for item in final_trmm:
|
||||
source_users_global.update(item["user_ids"])
|
||||
|
||||
target_users_global = set()
|
||||
for item in final_mesh:
|
||||
target_users_global.update(item["user_ids"])
|
||||
|
||||
# identify and create new users
|
||||
new_users = list(source_users_global - target_users_global)
|
||||
for user_id in new_users:
|
||||
user_info = mesh_users_dict[user_id]
|
||||
logger.info(f"Adding new user {user_info['username']} to mesh")
|
||||
ms.add_user_to_mesh(user_info=user_info)
|
||||
|
||||
users_to_delete_globally = list(target_users_global - source_users_global)
|
||||
for user_id in users_to_delete_globally:
|
||||
logger.info(f"Deleting {user_id} from mesh")
|
||||
ms.delete_user_from_mesh(mesh_user_id=user_id)
|
||||
|
||||
source_map = {item["node_id"]: set(item["user_ids"]) for item in final_trmm}
|
||||
target_map = {item["node_id"]: set(item["user_ids"]) for item in final_mesh}
|
||||
|
||||
def _get_sleep_after_n_inter(n):
|
||||
# {max number of agents: iterations to process before sleeping}
|
||||
thresholds = {250: 150, 500: 275, 800: 300, 1000: 340}
|
||||
for threshold, value in sorted(thresholds.items()):
|
||||
if n <= threshold:
|
||||
return value
|
||||
|
||||
return 375
|
||||
|
||||
iter_count = 0
|
||||
sleep_after = _get_sleep_after_n_inter(len(source_map))
|
||||
|
||||
for node_id, source_users in source_map.items():
|
||||
# skip agents without valid node id
|
||||
if node_id not in trmm_agents_meshnodeids:
|
||||
continue
|
||||
|
||||
target_users = target_map.get(node_id, set()) - set(
|
||||
users_to_delete_globally
|
||||
)
|
||||
source_users_adjusted = source_users - set(users_to_delete_globally)
|
||||
|
||||
# find users that need to be added or deleted
|
||||
users_to_add = list(source_users_adjusted - target_users)
|
||||
users_to_delete = list(target_users - source_users_adjusted)
|
||||
|
||||
if users_to_add or users_to_delete:
|
||||
iter_count += 1
|
||||
|
||||
if users_to_add:
|
||||
logger.info(f"Adding {users_to_add} to {node_id}")
|
||||
ms.add_users_to_node(node_id=node_id, user_ids=users_to_add)
|
||||
|
||||
if users_to_delete:
|
||||
logger.info(f"Deleting {users_to_delete} from {node_id}")
|
||||
ms.delete_users_from_node(node_id=node_id, user_ids=users_to_delete)
|
||||
|
||||
if iter_count % sleep_after == 0 and iter_count != 0:
|
||||
# mesh is very inefficient with sql, give it time to catch up so we don't crash the system
|
||||
logger.info(
|
||||
f"Sleeping for 7 seconds after {iter_count} iterations."
|
||||
)
|
||||
sleep(7)
|
||||
|
||||
# after all done, see if need to update display name
|
||||
ms2 = MeshSync(uri)
|
||||
unique_ids = ms2.get_unique_mesh_users(new_trmm_agents)
|
||||
for user in unique_ids:
|
||||
try:
|
||||
mesh_realname = ms2.mesh_users[user]["realname"]
|
||||
except KeyError:
|
||||
mesh_realname = ""
|
||||
trmm_realname = mesh_users_dict[user]["full_name"]
|
||||
if mesh_realname != trmm_realname:
|
||||
logger.info(
|
||||
f"Display names don't match. Updating {user} name from {mesh_realname} to {trmm_realname}"
|
||||
)
|
||||
ms2.update_mesh_displayname(user_info=mesh_users_dict[user])
|
||||
|
||||
except Exception:
|
||||
logger.debug(traceback.format_exc())
|
||||
|
||||
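Editorial note: the per-node permission sync in `sync_mesh_perms_task` reduces to plain set arithmetic. A tiny worked example with made-up user ids:

```python
# what TRMM says node X should have vs. what MeshCentral currently has
source = {"user//alice", "user//bob"}
target = {"user//bob", "user//charlie"}
globally_deleted = {"user//charlie"}  # users being removed from mesh entirely

source_adj = source - globally_deleted
target_adj = target - globally_deleted

users_to_add = list(source_adj - target_adj)     # ["user//alice"]
users_to_delete = list(target_adj - source_adj)  # [] (charlie is handled globally)
```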
@@ -1,3 +1,4 @@
|
||||
import os
|
||||
from unittest.mock import patch
|
||||
|
||||
import requests
|
||||
@@ -11,16 +12,15 @@ from model_bakery import baker
|
||||
from rest_framework.authtoken.models import Token
|
||||
|
||||
# from agents.models import Agent
|
||||
from core.utils import get_core_settings, get_meshagent_url
|
||||
from core.utils import get_core_settings, get_mesh_ws_url, get_meshagent_url
|
||||
|
||||
# from logs.models import PendingAction
|
||||
from tacticalrmm.constants import (
|
||||
from tacticalrmm.constants import ( # PAAction,; PAStatus,
|
||||
CONFIG_MGMT_CMDS,
|
||||
CustomFieldModel,
|
||||
MeshAgentIdent,
|
||||
# PAAction,
|
||||
# PAStatus,
|
||||
)
|
||||
from tacticalrmm.helpers import get_nats_hosts, get_nats_url
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .consumers import DashInfo
|
||||
@@ -110,18 +110,63 @@ class TestCoreTasks(TacticalTestCase):
|
||||
|
||||
def test_edit_coresettings(self):
|
||||
url = "/core/settings/"
|
||||
|
||||
# setup
|
||||
baker.make("automation.Policy", _quantity=2)
|
||||
# test normal request
|
||||
data = {
|
||||
"smtp_from_email": "newexample@example.com",
|
||||
"mesh_token": "New_Mesh_Token",
|
||||
"mesh_site": "https://mesh.example.com",
|
||||
"mesh_username": "bob",
|
||||
"sync_mesh_with_trmm": False,
|
||||
}
|
||||
r = self.client.put(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(get_core_settings().smtp_from_email, data["smtp_from_email"])
|
||||
self.assertEqual(get_core_settings().mesh_token, data["mesh_token"])
|
||||
core = get_core_settings()
|
||||
self.assertEqual(core.smtp_from_email, "newexample@example.com")
|
||||
self.assertEqual(core.mesh_token, "New_Mesh_Token")
|
||||
self.assertEqual(core.mesh_site, "https://mesh.example.com")
|
||||
self.assertEqual(core.mesh_username, "bob")
|
||||
self.assertFalse(core.sync_mesh_with_trmm)
|
||||
|
||||
# test to_representation
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.data["smtp_from_email"], "newexample@example.com")
|
||||
self.assertEqual(r.data["mesh_token"], "New_Mesh_Token")
|
||||
self.assertEqual(r.data["mesh_site"], "https://mesh.example.com")
|
||||
self.assertEqual(r.data["mesh_username"], "bob")
|
||||
self.assertFalse(r.data["sync_mesh_with_trmm"])
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@override_settings(HOSTED=True)
|
||||
def test_hosted_edit_coresettings(self):
|
||||
url = "/core/settings/"
|
||||
baker.make("automation.Policy", _quantity=2)
|
||||
data = {
|
||||
"smtp_from_email": "newexample1@example.com",
|
||||
"mesh_token": "abc123",
|
||||
"mesh_site": "https://mesh15534.example.com",
|
||||
"mesh_username": "jane",
|
||||
"sync_mesh_with_trmm": False,
|
||||
}
|
||||
r = self.client.put(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
core = get_core_settings()
|
||||
self.assertEqual(core.smtp_from_email, "newexample1@example.com")
|
||||
self.assertIn("41410834b8bb4481446027f8", core.mesh_token) # type: ignore
|
||||
self.assertTrue(core.sync_mesh_with_trmm)
|
||||
if "GHACTIONS" in os.environ:
|
||||
self.assertEqual(core.mesh_site, "https://example.com")
|
||||
self.assertEqual(core.mesh_username, "pipeline")
|
||||
|
||||
# test to_representation
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.data["smtp_from_email"], "newexample1@example.com")
|
||||
self.assertEqual(r.data["mesh_token"], "n/a")
|
||||
self.assertEqual(r.data["mesh_site"], "n/a")
|
||||
self.assertEqual(r.data["mesh_username"], "n/a")
|
||||
self.assertTrue(r.data["sync_mesh_with_trmm"])
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@@ -445,6 +490,80 @@ class TestCoreMgmtCommands(TacticalTestCase):
|
||||
call_command("get_config", cmd)
|
||||
|
||||
|
||||
class TestNatsUrls(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_standard_install(self):
|
||||
self.assertEqual(get_nats_url(), "nats://127.0.0.1:4222")
|
||||
|
||||
@override_settings(
|
||||
NATS_STANDARD_PORT=5000,
|
||||
USE_NATS_STANDARD=True,
|
||||
ALLOWED_HOSTS=["api.example.com"],
|
||||
)
|
||||
def test_custom_port_nats_standard(self):
|
||||
self.assertEqual(get_nats_url(), "tls://api.example.com:5000")
|
||||
|
||||
@override_settings(DOCKER_BUILD=True, ALLOWED_HOSTS=["api.example.com"])
|
||||
def test_docker_nats(self):
|
||||
self.assertEqual(get_nats_url(), "nats://api.example.com:4222")
|
||||
|
||||
@patch.dict("os.environ", {"NATS_CONNECT_HOST": "172.20.4.3"})
|
||||
@override_settings(ALLOWED_HOSTS=["api.example.com"])
|
||||
def test_custom_connect_host_env(self):
|
||||
self.assertEqual(get_nats_url(), "nats://172.20.4.3:4222")
|
||||
|
||||
def test_standard_nats_hosts(self):
|
||||
self.assertEqual(get_nats_hosts(), ("127.0.0.1", "127.0.0.1", "127.0.0.1"))
|
||||
|
||||
@override_settings(DOCKER_BUILD=True, ALLOWED_HOSTS=["api.example.com"])
|
||||
def test_docker_nats_hosts(self):
|
||||
self.assertEqual(get_nats_hosts(), ("0.0.0.0", "0.0.0.0", "api.example.com"))
|
||||
|
||||
|
||||
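Editorial note: for readers following along without the helpers module, here is one possible implementation of `get_nats_url()` that would satisfy the tests above. This is an editorial reconstruction, not the code in `tacticalrmm/helpers.py`, and the precedence between branches is an assumption:

```python
import os

from django.conf import settings


def get_nats_url_sketch() -> str:
    # hypothetical reconstruction from the expected values in TestNatsUrls
    if getattr(settings, "USE_NATS_STANDARD", False):
        port = getattr(settings, "NATS_STANDARD_PORT", 4222)
        return f"tls://{settings.ALLOWED_HOSTS[0]}:{port}"

    if "NATS_CONNECT_HOST" in os.environ:
        return f"nats://{os.environ['NATS_CONNECT_HOST']}:4222"

    host = settings.ALLOWED_HOSTS[0] if getattr(settings, "DOCKER_BUILD", False) else "127.0.0.1"
    return f"nats://{host}:4222"
```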
class TestMeshWSUrl(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("core.utils.get_auth_token")
|
||||
def test_standard_install(self, mock_token):
|
||||
mock_token.return_value = "abc123"
|
||||
self.assertEqual(
|
||||
get_mesh_ws_url(), "ws://127.0.0.1:4430/control.ashx?auth=abc123"
|
||||
)
|
||||
|
||||
@patch("core.utils.get_auth_token")
|
||||
@override_settings(MESH_PORT=8876)
|
||||
def test_standard_install_custom_port(self, mock_token):
|
||||
mock_token.return_value = "abc123"
|
||||
self.assertEqual(
|
||||
get_mesh_ws_url(), "ws://127.0.0.1:8876/control.ashx?auth=abc123"
|
||||
)
|
||||
|
||||
@patch("core.utils.get_auth_token")
|
||||
@override_settings(DOCKER_BUILD=True, MESH_WS_URL="ws://tactical-meshcentral:4443")
|
||||
def test_docker_install(self, mock_token):
|
||||
mock_token.return_value = "abc123"
|
||||
self.assertEqual(
|
||||
get_mesh_ws_url(), "ws://tactical-meshcentral:4443/control.ashx?auth=abc123"
|
||||
)
|
||||
|
||||
@patch("core.utils.get_auth_token")
|
||||
@override_settings(USE_EXTERNAL_MESH=True)
|
||||
def test_external_mesh(self, mock_token):
|
||||
mock_token.return_value = "abc123"
|
||||
|
||||
from core.models import CoreSettings
|
||||
|
||||
core = CoreSettings.objects.first()
|
||||
core.mesh_site = "https://mesh.external.com" # type: ignore
|
||||
core.save(update_fields=["mesh_site"]) # type: ignore
|
||||
self.assertEqual(
|
||||
get_mesh_ws_url(), "wss://mesh.external.com/control.ashx?auth=abc123"
|
||||
)
|
||||
|
||||
|
||||
class TestCorePermissions(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_client()
|
||||
@@ -464,7 +583,7 @@ class TestCoreUtils(TacticalTestCase):
|
||||
)
|
||||
self.assertEqual(
|
||||
r,
|
||||
"https://mesh.example.com/meshagents?id=abc123&installflags=2&meshinstall=10005",
|
||||
"http://127.0.0.1:4430/meshagents?id=abc123&installflags=2&meshinstall=10005",
|
||||
)
|
||||
|
||||
r = get_meshagent_url(
|
||||
@@ -475,7 +594,7 @@ class TestCoreUtils(TacticalTestCase):
|
||||
)
|
||||
self.assertEqual(
|
||||
r,
|
||||
"https://mesh.example.com/meshagents?id=4&meshid=abc123&installflags=0",
|
||||
"http://127.0.0.1:4430/meshagents?id=4&meshid=abc123&installflags=0",
|
||||
)
|
||||
|
||||
@override_settings(DOCKER_BUILD=True)
|
||||
@@ -503,8 +622,8 @@ class TestCoreUtils(TacticalTestCase):
|
||||
"http://tactical-meshcentral:4443/meshagents?id=4&meshid=abc123&installflags=0",
|
||||
)
|
||||
|
||||
@override_settings(TRMM_INSECURE=True)
|
||||
def test_get_meshagent_url_insecure(self):
|
||||
@override_settings(USE_EXTERNAL_MESH=True)
|
||||
def test_get_meshagent_url_external_mesh(self):
|
||||
r = get_meshagent_url(
|
||||
ident=MeshAgentIdent.DARWIN_UNIVERSAL,
|
||||
plat="darwin",
|
||||
@@ -513,7 +632,7 @@ class TestCoreUtils(TacticalTestCase):
|
||||
)
|
||||
self.assertEqual(
|
||||
r,
|
||||
"http://mesh.example.com:4430/meshagents?id=abc123&installflags=2&meshinstall=10005",
|
||||
"https://mesh.example.com/meshagents?id=abc123&installflags=2&meshinstall=10005",
|
||||
)
|
||||
|
||||
r = get_meshagent_url(
|
||||
@@ -524,5 +643,29 @@ class TestCoreUtils(TacticalTestCase):
|
||||
)
|
||||
self.assertEqual(
|
||||
r,
|
||||
"http://mesh.example.com:4430/meshagents?id=4&meshid=abc123&installflags=0",
|
||||
"https://mesh.example.com/meshagents?id=4&meshid=abc123&installflags=0",
|
||||
)
|
||||
|
||||
@override_settings(MESH_PORT=8653)
|
||||
def test_get_meshagent_url_mesh_port(self):
|
||||
r = get_meshagent_url(
|
||||
ident=MeshAgentIdent.DARWIN_UNIVERSAL,
|
||||
plat="darwin",
|
||||
mesh_site="https://mesh.example.com",
|
||||
mesh_device_id="abc123",
|
||||
)
|
||||
self.assertEqual(
|
||||
r,
|
||||
"http://127.0.0.1:8653/meshagents?id=abc123&installflags=2&meshinstall=10005",
|
||||
)
|
||||
|
||||
r = get_meshagent_url(
|
||||
ident=MeshAgentIdent.WIN64,
|
||||
plat="windows",
|
||||
mesh_site="https://mesh.example.com",
|
||||
mesh_device_id="abc123",
|
||||
)
|
||||
self.assertEqual(
|
||||
r,
|
||||
"http://127.0.0.1:8653/meshagents?id=4&meshid=abc123&installflags=0",
|
||||
)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from django.urls import path
|
||||
from django.conf import settings
|
||||
|
||||
from . import views
|
||||
|
||||
@@ -16,8 +17,20 @@ urlpatterns = [
|
||||
path("urlaction/", views.GetAddURLAction.as_view()),
|
||||
path("urlaction/<int:pk>/", views.UpdateDeleteURLAction.as_view()),
|
||||
path("urlaction/run/", views.RunURLAction.as_view()),
|
||||
path("urlaction/run/test/", views.RunTestURLAction.as_view()),
|
||||
path("smstest/", views.TwilioSMSTest.as_view()),
|
||||
path("clearcache/", views.clear_cache),
|
||||
path("status/", views.status),
|
||||
path("openai/generate/", views.OpenAICodeCompletion.as_view()),
|
||||
path("webtermperms/", views.webterm_perms),
|
||||
]
|
||||
|
||||
|
||||
if not (
|
||||
getattr(settings, "HOSTED", False)
|
||||
or getattr(settings, "TRMM_DISABLE_SERVER_SCRIPTS", False)
|
||||
or getattr(settings, "DEMO", False)
|
||||
):
|
||||
urlpatterns += [
|
||||
path("serverscript/test/", views.TestRunServerScript.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,24 +1,32 @@
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import tempfile
|
||||
import time
|
||||
import urllib.parse
|
||||
from base64 import b64encode
|
||||
from contextlib import suppress
|
||||
from typing import TYPE_CHECKING, Optional, cast
|
||||
|
||||
import requests
|
||||
import websockets
|
||||
from django.apps import apps
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.http import FileResponse
|
||||
from meshctrl.utils import get_auth_token
|
||||
from requests.utils import requote_uri
|
||||
|
||||
from tacticalrmm.constants import (
|
||||
AGENT_TBL_PEND_ACTION_CNT_CACHE_PREFIX,
|
||||
CORESETTINGS_CACHE_KEY,
|
||||
ROLE_CACHE_PREFIX,
|
||||
TRMM_WS_MAX_SIZE,
|
||||
AgentPlat,
|
||||
MeshAgentIdent,
|
||||
)
|
||||
from tacticalrmm.logger import logger
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from core.models import CoreSettings
|
||||
@@ -58,7 +66,7 @@ def token_is_valid() -> tuple[str, bool]:
|
||||
def token_is_expired() -> bool:
|
||||
from core.models import CodeSignToken
|
||||
|
||||
t: "CodeSignToken" = CodeSignToken.objects.first()
|
||||
t: Optional["CodeSignToken"] = CodeSignToken.objects.first()
|
||||
if not t or not t.token:
|
||||
return False
|
||||
|
||||
@@ -83,23 +91,23 @@ def get_core_settings() -> "CoreSettings":
|
||||
|
||||
def get_mesh_ws_url() -> str:
|
||||
core = get_core_settings()
|
||||
token = get_auth_token(core.mesh_username, core.mesh_token)
|
||||
token = get_auth_token(core.mesh_api_superuser, core.mesh_token)
|
||||
|
||||
if settings.DOCKER_BUILD:
|
||||
uri = f"{settings.MESH_WS_URL}/control.ashx?auth={token}"
|
||||
else:
|
||||
if getattr(settings, "TRMM_INSECURE", False):
|
||||
site = core.mesh_site.replace("https", "ws")
|
||||
uri = f"{site}:4430/control.ashx?auth={token}"
|
||||
else:
|
||||
if getattr(settings, "USE_EXTERNAL_MESH", False):
|
||||
site = core.mesh_site.replace("https", "wss")
|
||||
uri = f"{site}/control.ashx?auth={token}"
|
||||
else:
|
||||
mesh_port = getattr(settings, "MESH_PORT", 4430)
|
||||
uri = f"ws://127.0.0.1:{mesh_port}/control.ashx?auth={token}"
|
||||
|
||||
return uri
|
||||
|
||||
|
||||
async def get_mesh_device_id(uri: str, device_group: str) -> None:
|
||||
async with websockets.connect(uri) as ws:
|
||||
async with websockets.connect(uri, max_size=TRMM_WS_MAX_SIZE) as ws:
|
||||
payload = {"action": "meshes", "responseid": "meshctrl"}
|
||||
await ws.send(json.dumps(payload))
|
||||
|
||||
@@ -113,7 +121,7 @@ async def get_mesh_device_id(uri: str, device_group: str) -> None:
|
||||
|
||||
def download_mesh_agent(dl_url: str) -> FileResponse:
|
||||
with tempfile.NamedTemporaryFile(prefix="mesh-", dir=settings.EXE_DIR) as fp:
|
||||
r = requests.get(dl_url, stream=True, timeout=15)
|
||||
r = requests.get(dl_url, stream=True, timeout=15, verify=False)
|
||||
with open(fp.name, "wb") as f:
|
||||
for chunk in r.iter_content(chunk_size=1024):
|
||||
if chunk:
|
||||
@@ -185,10 +193,11 @@ def get_meshagent_url(
|
||||
) -> str:
|
||||
if settings.DOCKER_BUILD:
|
||||
base = settings.MESH_WS_URL.replace("ws://", "http://")
|
||||
elif getattr(settings, "TRMM_INSECURE", False):
|
||||
base = mesh_site.replace("https", "http") + ":4430"
|
||||
else:
|
||||
elif getattr(settings, "USE_EXTERNAL_MESH", False):
|
||||
base = mesh_site
|
||||
else:
|
||||
mesh_port = getattr(settings, "MESH_PORT", 4430)
|
||||
base = f"http://127.0.0.1:{mesh_port}"
|
||||
|
||||
if plat == AgentPlat.WINDOWS:
|
||||
params = {
|
||||
@@ -204,3 +213,173 @@ def get_meshagent_url(
|
||||
}
|
||||
|
||||
return base + "/meshagents?" + urllib.parse.urlencode(params)
|
||||
|
||||
|
||||
def make_alpha_numeric(s: str):
|
||||
return "".join(filter(str.isalnum, s))
|
||||
|
||||
|
||||
def find_and_replace_db_values_str(*, text: str, instance):
|
||||
from tacticalrmm.utils import RE_DB_VALUE, get_db_value
|
||||
|
||||
if not instance:
|
||||
return text
|
||||
|
||||
return_string = text
|
||||
|
||||
for string, model, prop in RE_DB_VALUE.findall(text):
|
||||
value = get_db_value(string=f"{model}.{prop}", instance=instance)
|
||||
return_string = return_string.replace(string, str(value))
|
||||
return return_string
|
||||
|
||||
|
||||
# usually for stderr fields that contain windows file paths, like {{alert.get_result.stderr}}
|
||||
# but preserves newlines or tabs
|
||||
# removes all control chars
|
||||
def _sanitize_webhook(s: str) -> str:
|
||||
s = re.sub(r"[\x00-\x08\x0b\x0c\x0e-\x1f\x7f-\x9f]", " ", s)
|
||||
s = re.sub(r"(?<!\\)(\\)(?![\\nrt])", r"\\\\", s)
|
||||
return s
|
||||
|
||||
|
||||
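Editorial note: taken together, the two helpers above turn a stored webhook body into something `json.loads(..., strict=False)` can parse. A small illustration; the placeholder, hostname and path are made up, and `agent` is assumed to be an `agents.models.Agent` instance:

```python
# assumes the two helpers defined above are in scope
raw = '{"host": "{{agent.hostname}}", "stderr": "error in C:\\Program Files\\app"}'

step1 = find_and_replace_db_values_str(text=raw, instance=agent)
# {{agent.hostname}} is resolved through get_db_value(), e.g. -> "DESKTOP-01"

step2 = _sanitize_webhook(step1)
# control characters become spaces and the lone backslashes from the Windows
# path are doubled, so the json.loads() call in _run_url_rest_action() succeeds
```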
def _run_url_rest_action(*, url: str, method, body: str, headers: str, instance=None):
|
||||
# replace url
|
||||
new_url = find_and_replace_db_values_str(text=url, instance=instance)
|
||||
new_body = find_and_replace_db_values_str(text=body, instance=instance)
|
||||
new_headers = find_and_replace_db_values_str(text=headers, instance=instance)
|
||||
new_url = requote_uri(new_url)
|
||||
|
||||
new_body = _sanitize_webhook(new_body)
|
||||
try:
|
||||
new_body = json.loads(new_body, strict=False)
|
||||
except Exception as e:
|
||||
logger.error(f"{e=} {body=}")
|
||||
logger.error(f"{new_body=}")
|
||||
|
||||
try:
|
||||
new_headers = json.loads(new_headers, strict=False)
|
||||
except Exception as e:
|
||||
logger.error(f"{e=} {headers=}")
|
||||
logger.error(f"{new_headers=}")
|
||||
|
||||
if method in ("get", "delete"):
|
||||
return getattr(requests, method)(new_url, headers=new_headers)
|
||||
|
||||
return getattr(requests, method)(
|
||||
new_url,
|
||||
data=json.dumps(new_body),
|
||||
headers=new_headers,
|
||||
timeout=8,
|
||||
)
|
||||
|
||||
|
||||
def run_url_rest_action(*, action_id: int, instance=None) -> tuple[str, int]:
|
||||
import core.models
|
||||
|
||||
action = core.models.URLAction.objects.get(pk=action_id)
|
||||
method = action.rest_method
|
||||
url = action.pattern
|
||||
body = action.rest_body
|
||||
headers = action.rest_headers
|
||||
|
||||
try:
|
||||
response = _run_url_rest_action(
|
||||
url=url, method=method, body=body, headers=headers, instance=instance
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(str(e))
|
||||
return (str(e), 500)
|
||||
|
||||
return (response.text, response.status_code)
|
||||
|
||||
|
||||
lookup_apps = {
|
||||
"client": ("clients", "Client"),
|
||||
"site": ("clients", "Site"),
|
||||
"agent": ("agents", "Agent"),
|
||||
}
|
||||
|
||||
|
||||
def run_test_url_rest_action(
|
||||
*,
|
||||
url: str,
|
||||
method,
|
||||
body: str,
|
||||
headers: str,
|
||||
instance_type: Optional[str],
|
||||
instance_id: Optional[int],
|
||||
) -> tuple[str, str, str]:
|
||||
lookup_instance = None
|
||||
if instance_type and instance_type in lookup_apps and instance_id:
|
||||
app, model = lookup_apps[instance_type]
|
||||
Model = apps.get_model(app, model)
|
||||
if instance_type == "agent":
|
||||
lookup_instance = Model.objects.get(agent_id=instance_id)
|
||||
else:
|
||||
lookup_instance = Model.objects.get(pk=instance_id)
|
||||
|
||||
try:
|
||||
response = _run_url_rest_action(
|
||||
url=url, method=method, body=body, headers=headers, instance=lookup_instance
|
||||
)
|
||||
except requests.exceptions.ConnectionError as error:
|
||||
return (str(error), str(error.request.url), str(error.request.body))
|
||||
except Exception as e:
|
||||
return (str(e), str(e), str(e))
|
||||
|
||||
return (response.text, response.request.url, response.request.body)
|
||||
|
||||
|
||||
def run_server_script(
|
||||
*, body: str, args: list[str], env_vars: list[str], shell: str, timeout: int
|
||||
) -> tuple[str, str, float, int]:
|
||||
from core.models import CoreSettings
|
||||
from scripts.models import Script
|
||||
|
||||
core = CoreSettings.objects.only("enable_server_scripts").first()
|
||||
if not core.server_scripts_enabled: # type: ignore
|
||||
return "", "Error: this feature is disabled", 0.00, 1
|
||||
|
||||
parsed_args = Script.parse_script_args(None, shell, args)
|
||||
|
||||
parsed_env_vars = Script.parse_script_env_vars(None, shell=shell, env_vars=env_vars)
|
||||
|
||||
custom_env = os.environ.copy()
|
||||
for var in parsed_env_vars:
|
||||
var_split = var.split("=")
|
||||
custom_env[var_split[0]] = var_split[1]
|
||||
|
||||
with tempfile.NamedTemporaryFile(
|
||||
mode="w", delete=False, prefix="trmm-"
|
||||
) as tmp_script:
|
||||
tmp_script.write(body.replace("\r\n", "\n"))
|
||||
tmp_script_path = tmp_script.name
|
||||
|
||||
os.chmod(tmp_script_path, 0o550)
|
||||
|
||||
stdout, stderr = "", ""
|
||||
retcode = 0
|
||||
|
||||
start_time = time.time()
|
||||
try:
|
||||
ret = subprocess.run(
|
||||
[tmp_script_path] + parsed_args,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
env=custom_env,
|
||||
timeout=timeout,
|
||||
)
|
||||
stdout, stderr, retcode = ret.stdout, ret.stderr, ret.returncode
|
||||
except subprocess.TimeoutExpired:
|
||||
stderr = f"Error: Timed out after {timeout} seconds."
|
||||
retcode = 98
|
||||
except Exception as e:
|
||||
stderr = f"Error: {e}"
|
||||
retcode = 99
|
||||
finally:
|
||||
execution_time = time.time() - start_time
|
||||
|
||||
with suppress(Exception):
|
||||
os.remove(tmp_script_path)
|
||||
|
||||
return stdout, stderr, execution_time, retcode
|
||||
|
||||
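Editorial note: a possible direct call of the new `run_server_script()` helper. The script body and argument values are illustrative, and the `shell` identifier is an assumption about what `Script.parse_script_args` expects:

```python
from core.utils import run_server_script

stdout, stderr, execution_time, retcode = run_server_script(
    body='#!/bin/bash\necho "hello $WHO from $1"\n',
    args=["trmm"],
    env_vars=["WHO=server"],
    shell="shell",  # assumed shell identifier
    timeout=10,
)
# retcode is 98 on timeout, 99 on any other failure,
# otherwise the script's own exit code
```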
@@ -1,8 +1,6 @@
|
||||
import json
|
||||
import re
|
||||
from contextlib import suppress
|
||||
from pathlib import Path
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
import psutil
|
||||
import requests
|
||||
@@ -13,15 +11,24 @@ from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
from redis import from_url
|
||||
from rest_framework import serializers
|
||||
from rest_framework import status as drf_status
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from core.decorators import monitoring_view
|
||||
from core.utils import get_core_settings, sysd_svc_is_running, token_is_valid
|
||||
from core.tasks import sync_mesh_perms_task
|
||||
from core.utils import (
|
||||
get_core_settings,
|
||||
run_server_script,
|
||||
run_test_url_rest_action,
|
||||
sysd_svc_is_running,
|
||||
token_is_valid,
|
||||
)
|
||||
from logs.models import AuditLog
|
||||
from tacticalrmm.constants import AuditActionType, PAStatus
|
||||
from tacticalrmm.helpers import get_certs, notify_error
|
||||
@@ -36,8 +43,11 @@ from .permissions import (
|
||||
CodeSignPerms,
|
||||
CoreSettingsPerms,
|
||||
CustomFieldPerms,
|
||||
GlobalKeyStorePerms,
|
||||
RunServerScriptPerms,
|
||||
ServerMaintPerms,
|
||||
URLActionPerms,
|
||||
WebTerminalPerms,
|
||||
)
|
||||
from .serializers import (
|
||||
CodeSignTokenSerializer,
|
||||
@@ -56,14 +66,31 @@ class GetEditCoreSettings(APIView):
|
||||
return Response(CoreSettingsSerializer(settings).data)
|
||||
|
||||
def put(self, request):
|
||||
data = request.data.copy()
|
||||
|
||||
if getattr(settings, "HOSTED", False):
|
||||
data.pop("mesh_site")
|
||||
data.pop("mesh_token")
|
||||
data.pop("mesh_username")
|
||||
data["sync_mesh_with_trmm"] = True
|
||||
data["enable_server_scripts"] = False
|
||||
data["enable_server_webterminal"] = False
|
||||
|
||||
coresettings = CoreSettings.objects.first()
|
||||
serializer = CoreSettingsSerializer(instance=coresettings, data=request.data)
|
||||
serializer = CoreSettingsSerializer(instance=coresettings, data=data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
sync_mesh_perms_task.delay()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([AllowAny])
|
||||
def home(request):
|
||||
return Response({"status": "ok"})
|
||||
|
||||
|
||||
@api_view()
|
||||
def version(request):
|
||||
return Response(settings.APP_VER)
|
||||
@@ -91,9 +118,9 @@ def dashboard_info(request):
|
||||
"show_community_scripts": request.user.show_community_scripts,
|
||||
"dbl_click_action": request.user.agent_dblclick_action,
|
||||
"default_agent_tbl_tab": request.user.default_agent_tbl_tab,
|
||||
"url_action": request.user.url_action.id
|
||||
if request.user.url_action
|
||||
else None,
|
||||
"url_action": (
|
||||
request.user.url_action.id if request.user.url_action else None
|
||||
),
|
||||
"client_tree_sort": request.user.client_tree_sort,
|
||||
"client_tree_splitter": request.user.client_tree_splitter,
|
||||
"loading_bar_color": request.user.loading_bar_color,
|
||||
@@ -108,6 +135,10 @@ def dashboard_info(request):
|
||||
"dash_negative_color": request.user.dash_negative_color,
|
||||
"dash_warning_color": request.user.dash_warning_color,
|
||||
"run_cmd_placeholder_text": runcmd_placeholder_text(),
|
||||
"server_scripts_enabled": core_settings.server_scripts_enabled,
|
||||
"web_terminal_enabled": core_settings.web_terminal_enabled,
|
||||
"block_local_user_logon": core_settings.block_local_user_logon,
|
||||
"sso_enabled": core_settings.sso_enabled,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -282,7 +313,7 @@ class CodeSign(APIView):
|
||||
|
||||
|
||||
class GetAddKeyStore(APIView):
|
||||
permission_classes = [IsAuthenticated, CoreSettingsPerms]
|
||||
permission_classes = [IsAuthenticated, GlobalKeyStorePerms]
|
||||
|
||||
def get(self, request):
|
||||
keys = GlobalKVStore.objects.all()
|
||||
@@ -297,7 +328,7 @@ class GetAddKeyStore(APIView):
|
||||
|
||||
|
||||
class UpdateDeleteKeyStore(APIView):
|
||||
permission_classes = [IsAuthenticated, CoreSettingsPerms]
|
||||
permission_classes = [IsAuthenticated, GlobalKeyStorePerms]
|
||||
|
||||
def put(self, request, pk):
|
||||
key = get_object_or_404(GlobalKVStore, pk=pk)
|
||||
@@ -315,7 +346,7 @@ class UpdateDeleteKeyStore(APIView):
|
||||
|
||||
|
||||
class GetAddURLAction(APIView):
|
||||
permission_classes = [IsAuthenticated, CoreSettingsPerms]
|
||||
permission_classes = [IsAuthenticated, URLActionPerms]
|
||||
|
||||
def get(self, request):
|
||||
actions = URLAction.objects.all()
|
||||
@@ -357,7 +388,7 @@ class RunURLAction(APIView):
|
||||
|
||||
from agents.models import Agent
|
||||
from clients.models import Client, Site
|
||||
from tacticalrmm.utils import get_db_value
|
||||
from tacticalrmm.utils import RE_DB_VALUE, get_db_value
|
||||
|
||||
if "agent_id" in request.data.keys():
|
||||
if not _has_perm_on_agent(request.user, request.data["agent_id"]):
|
||||
@@ -379,14 +410,12 @@ class RunURLAction(APIView):
|
||||
|
||||
action = get_object_or_404(URLAction, pk=request.data["action"])
|
||||
|
||||
pattern = re.compile("\\{\\{([\\w\\s]+\\.[\\w\\s]+)\\}\\}")
|
||||
|
||||
url_pattern = action.pattern
|
||||
|
||||
for string in re.findall(pattern, action.pattern):
|
||||
value = get_db_value(string=string, instance=instance)
|
||||
for string, model, prop in RE_DB_VALUE.findall(url_pattern):
|
||||
value = get_db_value(string=f"{model}.{prop}", instance=instance)
|
||||
|
||||
url_pattern = re.sub("\\{\\{" + string + "\\}\\}", str(value), url_pattern)
|
||||
url_pattern = url_pattern.replace(string, str(value))
|
||||
|
||||
AuditLog.audit_url_action(
|
||||
username=request.user.username,
|
||||
@@ -398,6 +427,119 @@ class RunURLAction(APIView):
|
||||
return Response(requote_uri(url_pattern))
|
||||
|
||||
|
||||
class RunTestURLAction(APIView):
|
||||
permission_classes = [IsAuthenticated, URLActionPerms]
|
||||
|
||||
class InputSerializer(serializers.Serializer):
|
||||
pattern = serializers.CharField(required=True)
|
||||
rest_body = serializers.CharField()
|
||||
rest_headers = serializers.CharField()
|
||||
rest_method = serializers.ChoiceField(
|
||||
required=True, choices=["get", "post", "put", "delete", "patch"]
|
||||
)
|
||||
run_instance_type = serializers.ChoiceField(
|
||||
choices=["agent", "client", "site", "none"]
|
||||
)
|
||||
run_instance_id = serializers.CharField(allow_null=True)
|
||||
|
||||
def post(self, request):
|
||||
serializer = self.InputSerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
url = serializer.validated_data.get("pattern")
|
||||
body = serializer.validated_data.get("rest_body", None)
|
||||
headers = serializer.validated_data.get("rest_headers", None)
|
||||
method = serializer.validated_data.get("rest_method")
|
||||
instance_type = serializer.validated_data.get("run_instance_type", None)
|
||||
instance_id = serializer.validated_data.get("run_instance_id", None)
|
||||
|
||||
# make sure user has permissions to run against client/agent/site
|
||||
if instance_type == "agent":
|
||||
if not _has_perm_on_agent(request.user, instance_id):
|
||||
raise PermissionDenied()
|
||||
|
||||
elif instance_type == "site":
|
||||
if not _has_perm_on_site(request.user, instance_id):
|
||||
raise PermissionDenied()
|
||||
|
||||
elif instance_type == "client":
|
||||
if not _has_perm_on_client(request.user, instance_id):
|
||||
raise PermissionDenied()
|
||||
|
||||
result, replaced_url, replaced_body = run_test_url_rest_action(
|
||||
url=url,
|
||||
body=body,
|
||||
headers=headers,
|
||||
method=method,
|
||||
instance_type=instance_type,
|
||||
instance_id=instance_id,
|
||||
)
|
||||
|
||||
AuditLog.audit_url_action_test(
|
||||
username=request.user.username,
|
||||
url=url,
|
||||
body=replaced_body,
|
||||
headers=headers,
|
||||
instance_type=instance_type,
|
||||
instance_id=instance_id,
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
return Response({"url": replaced_url, "result": result, "body": replaced_body})
|
||||
|
||||
|
||||
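Editorial note: based on `InputSerializer` and the `/core/urlaction/run/test/` route added earlier in this diff, a request to the new endpoint could look like the following; the URL, header JSON and agent id are placeholders:

```python
payload = {
    "pattern": "https://webhook.example.com/notify?host={{agent.hostname}}",
    "rest_method": "post",
    "rest_body": '{"client": "{{client.name}}"}',
    "rest_headers": '{"Content-Type": "application/json"}',
    "run_instance_type": "agent",
    "run_instance_id": "<agent_id>",  # placeholder
}
# e.g. inside a TacticalTestCase:
# r = self.client.post("/core/urlaction/run/test/", payload, format="json")
# r.data -> {"url": ..., "result": ..., "body": ...}
```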
class TestRunServerScript(APIView):
|
||||
permission_classes = [IsAuthenticated, RunServerScriptPerms]
|
||||
|
||||
def post(self, request):
|
||||
core: CoreSettings = CoreSettings.objects.first() # type: ignore
|
||||
if not core.server_scripts_enabled:
|
||||
return notify_error(
|
||||
"This feature is disabled. It can be enabled in Global Settings."
|
||||
)
|
||||
|
||||
code: str = request.data["code"]
|
||||
if not code.startswith("#!"):
|
||||
return notify_error("Missing shebang!")
|
||||
|
||||
stdout, stderr, execution_time, retcode = run_server_script(
|
||||
body=code,
|
||||
args=request.data["args"],
|
||||
env_vars=request.data["env_vars"],
|
||||
timeout=request.data["timeout"],
|
||||
shell=request.data["shell"],
|
||||
)
|
||||
|
||||
AuditLog.audit_test_script_run(
|
||||
username=request.user.username,
|
||||
agent=None,
|
||||
script_body=code,
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
ret = {
|
||||
"stdout": stdout,
|
||||
"stderr": stderr,
|
||||
"execution_time": f"{execution_time:.4f}",
|
||||
"retcode": retcode,
|
||||
}
|
||||
|
||||
return Response(ret)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
@permission_classes([IsAuthenticated, WebTerminalPerms])
|
||||
def webterm_perms(request):
|
||||
# this view is only used to display a notification if feature is disabled
|
||||
# perms are actually enforced in the consumer
|
||||
core: CoreSettings = CoreSettings.objects.first() # type: ignore
|
||||
if not core.web_terminal_enabled:
|
||||
ret = "This feature is disabled. It can be enabled in Global Settings."
|
||||
return Response(ret, status=drf_status.HTTP_412_PRECONDITION_FAILED)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class TwilioSMSTest(APIView):
|
||||
permission_classes = [IsAuthenticated, CoreSettingsPerms]
|
||||
|
||||
@@ -428,9 +570,7 @@ def status(request):
|
||||
cert_bytes = Path(cert_file).read_bytes()
|
||||
|
||||
cert = x509.load_pem_x509_certificate(cert_bytes)
|
||||
expires = cert.not_valid_after.replace(tzinfo=ZoneInfo("UTC"))
|
||||
now = djangotime.now()
|
||||
delta = expires - now
|
||||
delta = cert.not_valid_after_utc - djangotime.now()
|
||||
|
||||
redis_url = f"redis://{settings.REDIS_HOST}"
|
||||
redis_ping = False
|
||||
|
||||
@@ -4,7 +4,7 @@ Copyright (c) 2023 Amidaware Inc. All rights reserved.
|
||||
|
||||
This Agreement is entered into between the licensee ("You" or the "Licensee") and Amidaware Inc. ("Amidaware") and governs the use of the enterprise features of the Tactical RMM Software (hereinafter referred to as the "Software").
|
||||
|
||||
The EE features of the Software, including but not limited to Reporting and White-labeling, are exclusively contained within directories named "ee," "enterprise," or "premium" in Amidaware's repositories, or in any files bearing the EE License header. The use of the Software is also governed by the terms and conditions set forth in the Tactical RMM License, available at https://license.tacticalrmm.com, which terms are incorporated herein by reference.
|
||||
The EE features of the Software, including but not limited to SSO (Single Sign-On), Reporting and White-labeling, are exclusively contained within directories named "ee," "enterprise," or "premium" in Amidaware's repositories, or in any files bearing the EE License header. The use of the Software is also governed by the terms and conditions set forth in the Tactical RMM License, available at https://license.tacticalrmm.com, which terms are incorporated herein by reference.
|
||||
|
||||
## License Grant
|
||||
|
||||
|
||||
api/tacticalrmm/ee/reporting/custom_filters.py (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
from contextlib import suppress
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
import validators
|
||||
|
||||
|
||||
def as_tz(date_obj, tz, format="%b %d %Y, %I:%M %p"):
|
||||
return date_obj.astimezone(ZoneInfo(tz)).strftime(format)
|
||||
|
||||
|
||||
def local_ips(wmi_detail):
|
||||
ret = []
|
||||
with suppress(Exception):
|
||||
ips = wmi_detail["network_config"]
|
||||
for i in ips:
|
||||
try:
|
||||
addr = [x["IPAddress"] for x in i if "IPAddress" in x][0]
|
||||
except:
|
||||
continue
|
||||
|
||||
if addr is None:
|
||||
continue
|
||||
|
||||
for ip in addr:
|
||||
if validators.ipv4(ip):
|
||||
ret.append(ip)
|
||||
|
||||
return ret
|
||||
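Editorial note: these functions are picked up automatically as Jinja filters by the `inspect.getmembers()` loop added to `ee/reporting/utils.py` later in this diff. A self-contained sketch of what they do, with made-up data and manual filter registration so it runs outside the reporting engine (the import path is an assumption):

```python
from datetime import datetime, timezone

from jinja2 import Environment

from ee.reporting import custom_filters  # assumed import path

env = Environment()
env.filters["as_tz"] = custom_filters.as_tz
env.filters["local_ips"] = custom_filters.local_ips

tmpl = env.from_string(
    "{{ when | as_tz('America/New_York') }} / {{ wmi | local_ips | join(', ') }}"
)
print(
    tmpl.render(
        when=datetime(2023, 11, 7, 18, 22, tzinfo=timezone.utc),
        wmi={"network_config": [[{"IPAddress": ["10.0.0.5"]}]]},
    )
)
# -> Nov 07 2023, 01:22 PM / 10.0.0.5
```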
@@ -3,6 +3,7 @@ Copyright (c) 2023-present Amidaware Inc.
|
||||
This file is subject to the EE License Agreement.
|
||||
For details, see: https://license.tacticalrmm.com/ee
|
||||
"""
|
||||
|
||||
import json
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Tuple
|
||||
|
||||
|
||||
@@ -3,11 +3,10 @@ Copyright (c) 2023-present Amidaware Inc.
|
||||
This file is subject to the EE License Agreement.
|
||||
For details, see: https://license.tacticalrmm.com/ee
|
||||
"""
|
||||
|
||||
import urllib.parse
|
||||
from time import sleep
|
||||
from typing import Any, Optional
|
||||
|
||||
import requests
|
||||
from core.models import CodeSignToken
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
@@ -25,39 +24,12 @@ class Command(BaseCommand):
|
||||
self.stdout.write(url)
|
||||
return
|
||||
|
||||
attempts = 0
|
||||
while 1:
|
||||
try:
|
||||
r = requests.post(
|
||||
settings.REPORTING_CHECK_URL,
|
||||
json={"token": t.token, "api": settings.ALLOWED_HOSTS[0]},
|
||||
headers={"Content-type": "application/json"},
|
||||
timeout=15,
|
||||
)
|
||||
except Exception as e:
|
||||
self.stderr.write(str(e))
|
||||
attempts += 1
|
||||
sleep(3)
|
||||
else:
|
||||
if r.status_code // 100 in (3, 5):
|
||||
self.stderr.write(f"Error getting web tarball: {r.status_code}")
|
||||
attempts += 1
|
||||
sleep(3)
|
||||
else:
|
||||
attempts = 0
|
||||
|
||||
if attempts == 0:
|
||||
break
|
||||
elif attempts > 5:
|
||||
self.stdout.write(url)
|
||||
return
|
||||
|
||||
if r.status_code == 200: # type: ignore
|
||||
if t.is_valid:
|
||||
params = {
|
||||
"token": t.token,
|
||||
"webver": settings.WEB_VERSION,
|
||||
"api": settings.ALLOWED_HOSTS[0],
|
||||
}
|
||||
url = settings.REPORTING_DL_URL + urllib.parse.urlencode(params)
|
||||
url = settings.WEBTAR_DL_URL + urllib.parse.urlencode(params)
|
||||
|
||||
self.stdout.write(url)
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.6 on 2023-11-07 18:22
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('reporting', '0002_alter_reporttemplate_type'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='reporthtmltemplate',
|
||||
name='name',
|
||||
field=models.CharField(max_length=200, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='reporttemplate',
|
||||
name='name',
|
||||
field=models.CharField(max_length=200, unique=True),
|
||||
),
|
||||
]
|
||||
@@ -19,7 +19,7 @@ class ReportFormatType(models.TextChoices):
|
||||
|
||||
|
||||
class ReportTemplate(models.Model):
|
||||
name = models.CharField(max_length=50, unique=True)
|
||||
name = models.CharField(max_length=200, unique=True)
|
||||
template_md = models.TextField()
|
||||
template_css = models.TextField(null=True, blank=True)
|
||||
template_html = models.ForeignKey(
|
||||
@@ -44,7 +44,7 @@ class ReportTemplate(models.Model):
|
||||
|
||||
|
||||
class ReportHTMLTemplate(models.Model):
|
||||
name = models.CharField(max_length=50, unique=True)
|
||||
name = models.CharField(max_length=200, unique=True)
|
||||
html = models.TextField()
|
||||
|
||||
def __str__(self) -> str:
|
||||
|
||||
@@ -187,9 +187,11 @@ class TestReportTemplateGenerateView:
|
||||
template=report_template.template_md,
|
||||
template_type=report_template.type,
|
||||
css=report_template.template_css if report_template.template_css else "",
|
||||
html_template=report_template.template_html.id
|
||||
if report_template.template_html
|
||||
else None,
|
||||
html_template=(
|
||||
report_template.template_html.id
|
||||
if report_template.template_html
|
||||
else None
|
||||
),
|
||||
variables=report_template.template_variables,
|
||||
dependencies={"client": 1},
|
||||
)
|
||||
|
||||
@@ -4,27 +4,28 @@ This file is subject to the EE License Agreement.
|
||||
For details, see: https://license.tacticalrmm.com/ee
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import inspect
|
||||
import json
|
||||
import re
|
||||
from enum import Enum
|
||||
from typing import Any, Dict, List, Literal, Optional, Tuple, Type, Union, cast
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
import yaml
|
||||
from django.apps import apps
|
||||
from jinja2 import Environment, FunctionLoader
|
||||
from rest_framework.serializers import ValidationError
|
||||
from tacticalrmm.utils import get_db_value
|
||||
from weasyprint import CSS, HTML
|
||||
from weasyprint.text.fonts import FontConfiguration
|
||||
|
||||
from tacticalrmm.utils import get_db_value
|
||||
|
||||
from . import custom_filters
|
||||
from .constants import REPORTING_MODELS
|
||||
from .markdown.config import Markdown
|
||||
from .models import ReportAsset, ReportDataQuery, ReportHTMLTemplate, ReportTemplate
|
||||
|
||||
# regex for db data replacement
|
||||
# will return 3 groups of matches in a tuple when used with re.findall
|
||||
# i.e. - {{client.name}}, client.name, client
|
||||
RE_DB_VALUE = re.compile(r"(\{\{\s*(client|site|agent|global)\.(.*)\s*\}\})")
|
||||
from tacticalrmm.utils import RE_DB_VALUE
|
||||
|
||||
RE_ASSET_URL = re.compile(
|
||||
r"(asset://([0-9a-f]{8}-[0-9a-f]{4}-[0-5][0-9a-f]{3}-[089ab][0-9a-f]{3}-[0-9a-f]{12}))"
|
||||
@@ -57,9 +58,23 @@ env = Environment(
|
||||
loader=FunctionLoader(db_template_loader),
|
||||
comment_start_string="{=",
|
||||
comment_end_string="=}",
|
||||
extensions=["jinja2.ext.do", "jinja2.ext.loopcontrols"],
|
||||
)
|
||||
|
||||
|
||||
custom_globals = {
|
||||
"datetime": datetime,
|
||||
"ZoneInfo": ZoneInfo,
|
||||
"re": re,
|
||||
}
|
||||
|
||||
env.globals.update(custom_globals)
|
||||
|
||||
# import all functions from custom_filters.py
|
||||
for name, func in inspect.getmembers(custom_filters, inspect.isfunction):
|
||||
env.filters[name] = func
|
||||
|
||||
|
||||
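Editorial note: with `datetime`, `ZoneInfo` and `re` now exposed as globals and the custom `{= =}` comment delimiters, report templates rendered through this `env` can do things like the following (template text is illustrative):

```python
# uses the module-level `env` configured above
template = env.from_string(
    "Generated {{ datetime.datetime.now(ZoneInfo('UTC')).strftime('%Y-%m-%d') }} "
    "{= this is a Jinja comment using the custom delimiters =}"
)
print(template.render())  # -> "Generated 2023-11-07 " (date will vary)
```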
def generate_pdf(*, html: str, css: str = "") -> bytes:
|
||||
font_config = FontConfiguration()
|
||||
|
||||
@@ -307,45 +322,46 @@ def build_queryset(*, data_source: Dict[str, Any], limit: Optional[int] = None)
|
||||
queryset = queryset.first()
|
||||
|
||||
if fields_to_add:
|
||||
return add_custom_fields(
|
||||
queryset = add_custom_fields(
|
||||
data=queryset,
|
||||
fields_to_add=fields_to_add,
|
||||
model_name=model_name,
|
||||
dict_value=True,
|
||||
)
|
||||
else:
|
||||
if isJson:
|
||||
return json.dumps(queryset, default=str)
|
||||
elif isCsv:
|
||||
import pandas as pd
|
||||
|
||||
df = pd.DataFrame.from_dict([queryset])
|
||||
df.drop("id", axis=1, inplace=True)
|
||||
if csv_columns:
|
||||
df = df.rename(columns=csv_columns)
|
||||
return df.to_csv(index=False)
|
||||
else:
|
||||
return queryset
|
||||
if isJson:
|
||||
return json.dumps(queryset, default=str)
|
||||
elif isCsv:
|
||||
import pandas as pd
|
||||
|
||||
df = pd.DataFrame.from_dict([queryset])
|
||||
df.drop("id", axis=1, inplace=True)
|
||||
if csv_columns:
|
||||
df = df.rename(columns=csv_columns)
|
||||
return df.to_csv(index=False)
|
||||
else:
|
||||
return queryset
|
||||
else:
|
||||
# add custom fields for list results
|
||||
if fields_to_add:
|
||||
return add_custom_fields(
|
||||
data=list(queryset), fields_to_add=fields_to_add, model_name=model_name
|
||||
)
|
||||
else:
|
||||
if isJson:
|
||||
return json.dumps(list(queryset), default=str)
|
||||
elif isCsv:
|
||||
import pandas as pd
|
||||
queryset = list(queryset)
|
||||
|
||||
df = pd.DataFrame.from_dict(list(queryset))
|
||||
df.drop("id", axis=1, inplace=True)
|
||||
print(csv_columns)
|
||||
if csv_columns:
|
||||
df = df.rename(columns=csv_columns)
|
||||
return df.to_csv(index=False)
|
||||
else:
|
||||
return list(queryset)
|
||||
if fields_to_add:
|
||||
queryset = add_custom_fields(
|
||||
data=queryset, fields_to_add=fields_to_add, model_name=model_name
|
||||
)
|
||||
|
||||
if isJson:
|
||||
return json.dumps(queryset, default=str)
|
||||
elif isCsv:
|
||||
import pandas as pd
|
||||
|
||||
df = pd.DataFrame.from_dict(queryset)
|
||||
df.drop("id", axis=1, inplace=True)
|
||||
if csv_columns:
|
||||
df = df.rename(columns=csv_columns)
|
||||
return df.to_csv(index=False)
|
||||
else:
|
||||
return queryset
|
||||
|
||||
|
||||
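Editorial note: the CSV branch above, in isolation. `csv_columns` maps model field names to friendlier headers before export; the data here is made up:

```python
import pandas as pd

rows = [
    {"id": 1, "hostname": "DESKTOP-01", "plat": "windows"},
    {"id": 2, "hostname": "srv-ubuntu", "plat": "linux"},
]
csv_columns = {"hostname": "Hostname", "plat": "Platform"}

df = pd.DataFrame.from_dict(rows)
df.drop("id", axis=1, inplace=True)  # the primary key is never exported
df = df.rename(columns=csv_columns)
print(df.to_csv(index=False))
# Hostname,Platform
# DESKTOP-01,windows
# srv-ubuntu,linux
```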
def add_custom_fields(
|
||||
|
||||
@@ -130,9 +130,9 @@ class GenerateReport(APIView):
|
||||
template=template.template_md,
|
||||
template_type=template.type,
|
||||
css=template.template_css or "",
|
||||
html_template=template.template_html.id
|
||||
if template.template_html
|
||||
else None,
|
||||
html_template=(
|
||||
template.template_html.id if template.template_html else None
|
||||
),
|
||||
variables=template.template_variables,
|
||||
dependencies=request.data["dependencies"],
|
||||
)
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.