Compare commits

187 Commits (SHA1):

4f44671acd, b5eed69712, b79aacb2a7, 8a2eb7b058, 7316d076a2, 479d3bcb40, f2358f1530, 47d9e1b966,
c53657d693, f19ce59e00, 076f3e05d6, 7d017f9494, 675de4e420, 418a709c6c, 1d7dd1b754, 3fa70d6d2b,
9c67f52161, 9f2f23fa96, 46d955691a, 3f8800187d, ebbe90dfa8, 074f898160, a0e1783e18, fc83e11d8b,
f43627b170, 8964441f44, cfd7a0c621, 15a422873e, d1f5583cd7, 08f07c6f3e, 35a08debc3, a3424c480f,
118ced0a43, 6d355ef0cd, a8aa5ac231, df6bc0b3c9, 6b965b765c, d7aea6b5ba, 1e9a46855d, 91e9c18110,
8ffa6088d7, 52d2f8364f, 1f679af6fa, 1ba92cdcd5, 45c60ba5f5, d3eef45608, 1960c113d4, 63d6b4a1c9,
9f47bb1252, df4fea31d0, 98ef1484c8, c4ef9960b9, 6b6f7744aa, 9192fa0fe2, 3c7c2dc1a5, 5c176a1af0,
6d03a1cc76, 1cf10edef1, 6a97c63bf4, 15f9612bfa, 9a7c90b194, 91f2708a87, 7bf3ecd89d, 4768581631,
aa4cd10e13, 066396916d, 34ae57e6fe, 107c2b50e2, a832765203, 977fee82b5, 8c74cbc1c6, b38eec5039,
6c20b932fa, deb24c638f, 40fcdb4d28, f3e44cf458, 498748217d, 483bf331fa, 9d62b4acdd, c9deef6e76,
8ba6f8b0e1, 824cbdc84b, 448c59ea88, 91b858bf33, c12bede980, 71e9fa3d16, 6800b9aaae, 77d44f25f9,
ab6227828b, 719ba56c59, dacedf4018, 2526fa3c47, 7e2295c382, 6ef02004ff, 0e60d062e9, 80a94f97c4,
c18bc5fe67, 02b98a2429, 0383aeaa87, 15a41d532e, 0f49725789, 1db6733e66, 0343ee4f6b, 2c37d2233a,
0cb8ccfddd, 41c0e85d00, 35b1a39ed8, 61a577ba70, a1e32584fa, 28e0ee536d, 9d64a9c038, 702ba969c2,
6dde8ee2b8, 018420310c, 6d49d34033, 1fbd403164, 13f544d2be, 3c9e64de81, 5a9bafbc32, b89d96b66f,
b7176191ac, 453c5f47c2, eea62e1263, 4fb2a0f1ca, 1d102ef096, bf3c65778e, df7fe3e6b4, b657468b62,
4edc0058d3, 2c3b35293b, be0c9a4d46, dd4140558e, 71c2519b8e, badfc26aed, b2bc3adb3d, 5ccf408fd6,
da185875bb, af16912541, 1bf9e2a5e6, 5a572651ff, 5a191e387f, 18f29f5790, 054a73e0f8, 14824db7b0,
721c48ea88, ed7bfcfb58, 773a40a126, 961252ef26, a2650f3c47, d71ee194e1, 22e1a4cf41, a50bf901d3,
c9469635b5, 36df3278e5, cb2258aaa8, 0391d9eb7e, 12698b4c20, f7b9d459ab, 65ab14e68b, 93a5dd5de4,
61807bdaaa, a1a5d1adba, 9dd4aefea5, db4540089a, 24c899c91a, ade1a73966, fb9ec2b040, 3a683812e9,
6d317603c9, 5a3d2d196c, e740c4d980, 253e4596e2, 70e75a355c, 4f885c9a79, b519d2afac, 6b61e3b76b,
30b9c72c31, 385bf74f6e, be5615e530, d81a03c093, f8249c8267, 5a1cbdcd3b, e0c99d87bd, 548250029d,
66a354dbdc, 87d05223af, babf6366e8
@@ -1,11 +1,11 @@
 # pulls community scripts from git repo
-FROM python:3.10.6-slim AS GET_SCRIPTS_STAGE
+FROM python:3.11.2-slim AS GET_SCRIPTS_STAGE

 RUN apt-get update && \
     apt-get install -y --no-install-recommends git && \
     git clone https://github.com/amidaware/community-scripts.git /community-scripts

-FROM python:3.10.6-slim
+FROM python:3.11.2-slim

 ENV TACTICAL_DIR /opt/tactical
 ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
4 .github/FUNDING.yml vendored

@@ -1,9 +1,9 @@
 # These are supported funding model platforms

-github: wh1te909
+github: amidaware
 patreon: # Replace with a single Patreon username
 open_collective: # Replace with a single Open Collective username
-ko_fi: tacticalrmm
+ko_fi: # tacticalrmm
 tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
 community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
 liberapay: # Replace with a single Liberapay username
13 .github/workflows/ci-tests.yml vendored

@@ -14,7 +14,7 @@ jobs:
     name: Tests
     strategy:
       matrix:
-        python-version: ["3.10.6"]
+        python-version: ["3.11.2"]

     steps:
       - uses: actions/checkout@v3
@@ -27,9 +27,10 @@ jobs:
           postgresql password: "pipeline123456"

       - name: Setup Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
+          check-latest: true

       - name: Install redis
         run: |
@@ -56,6 +57,14 @@ jobs:
            exit 1
          fi

+      - name: Lint with flake8
+        working-directory: api/tacticalrmm
+        run: |
+          flake8 --config .flake8 .
+          if [ $? -ne 0 ]; then
+            exit 1
+          fi
+
       - name: Run django tests
         env:
           GHACTIONS: "yes"
1 .gitignore vendored

@@ -56,3 +56,4 @@ daphne.sock.lock
 .pytest_cache
 coverage.xml
 setup_dev.yml
+11env/
13 .vscode/settings.json vendored

@@ -1,7 +1,10 @@
 {
-  "python.defaultInterpreterPath": "api/tacticalrmm/env/bin/python",
+  "python.defaultInterpreterPath": "api/env/bin/python",
   "python.languageServer": "Pylance",
-  "python.analysis.extraPaths": ["api/tacticalrmm", "api/env"],
+  "python.analysis.extraPaths": [
+    "api/tacticalrmm",
+    "api/env"
+  ],
   "python.analysis.diagnosticSeverityOverrides": {
     "reportUnusedImport": "error",
     "reportDuplicateImport": "error",
@@ -22,7 +25,9 @@
     "**env/**"
   ],
   "python.formatting.provider": "black",
-  "mypy.targets": ["api/tacticalrmm"],
+  "mypy.targets": [
+    "api/tacticalrmm"
+  ],
   "mypy.runUsingActiveInterpreter": true,
   "editor.bracketPairColorization.enabled": true,
   "editor.guides.bracketPairs": true,
@@ -70,4 +75,4 @@
     "completeUnimported": true,
     "staticcheck": true
   }
 }
 }
@@ -35,6 +35,9 @@ Demo database resets every hour. A lot of features are disabled for obvious reas

 ## Linux agent versions supported
 - Any distro with systemd which includes but is not limited to: Debian (10, 11), Ubuntu x86_64 (18.04, 20.04, 22.04), Synology 7, centos, freepbx and more!

+## Mac agent versions supported
+- 64 bit Intel and Apple Silicon (M1, M2)
+
 ## Installation / Backup / Restore / Usage

 ### Refer to the [documentation](https://docs.tacticalrmm.com)
@@ -1,7 +1,7 @@
 ---
 user: "tactical"
-python_ver: "3.10.6"
-go_ver: "1.18.5"
+python_ver: "3.11.2"
+go_ver: "1.19.7"
 backend_repo: "https://github.com/amidaware/tacticalrmm.git"
 frontend_repo: "https://github.com/amidaware/tacticalrmm-web.git"
 scripts_repo: "https://github.com/amidaware/community-scripts.git"
@@ -407,7 +407,7 @@
     tags: pip
     ansible.builtin.shell:
       chdir: "{{ backend_dir }}/api"
-      cmd: python3.10 -m venv env
+      cmd: python3.11 -m venv env

 - name: update pip to latest
   tags: pip
@@ -2,6 +2,10 @@ SECRET_KEY = "{{ django_secret }}"
 DEBUG = True
 ALLOWED_HOSTS = ['{{ api }}']
 ADMIN_URL = "admin/"
+CORS_ORIGIN_WHITELIST = [
+    "http://{{ rmm }}:8080",
+    "https://{{ rmm }}:8080",
+]
 CORS_ORIGIN_ALLOW_ALL = True
 DATABASES = {
     'default': {
12 api/tacticalrmm/.flake8 Normal file

@@ -0,0 +1,12 @@
+[flake8]
+ignore = E501,W503,E722,E203
+exclude =
+    .mypy*
+    .pytest*
+    .git
+    demo_data.py
+    manage.py
+    */__pycache__/*
+    */env/*
+    /usr/local/lib/*
+    **/migrations/*
@@ -1,10 +1,10 @@
-import os
 import subprocess

 import pyotp
 from django.core.management.base import BaseCommand

 from accounts.models import User
+from tacticalrmm.helpers import get_webdomain


 class Command(BaseCommand):
@@ -21,28 +21,13 @@ class Command(BaseCommand):
             self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
             return

-        domain = "Tactical RMM"
-        nginx = "/etc/nginx/sites-available/frontend.conf"
-        found = None
-        if os.path.exists(nginx):
-            try:
-                with open(nginx, "r") as f:
-                    for line in f:
-                        if "server_name" in line:
-                            found = line
-                            break
-
-                if found:
-                    rep = found.replace("server_name", "").replace(";", "")
-                    domain = "".join(rep.split())
-            except:
-                pass
-
         code = pyotp.random_base32()
         user.totp_key = code
         user.save(update_fields=["totp_key"])

-        url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain)
+        url = pyotp.totp.TOTP(code).provisioning_uri(
+            username, issuer_name=get_webdomain()
+        )
        subprocess.run(f'qr "{url}"', shell=True)
        self.stdout.write(
            self.style.WARNING("Scan the barcode above with your authenticator app")
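For reference, the pyotp flow used by this command can be exercised standalone. A minimal sketch; the issuer string below is a placeholder, not what get_webdomain() returns:

```python
# Standalone sketch of the TOTP flow above: generate a base32 secret, build the
# otpauth:// provisioning URI (the command renders it as a QR code via the `qr` CLI),
# then verify a 6-digit code. "rmm.example.com" is a made-up issuer.
import pyotp

secret = pyotp.random_base32()
uri = pyotp.totp.TOTP(secret).provisioning_uri("tactical", issuer_name="rmm.example.com")
print(uri)  # otpauth://totp/rmm.example.com:tactical?secret=...&issuer=rmm.example.com

totp = pyotp.TOTP(secret)
code = totp.now()          # current 6-digit code
print(totp.verify(code))   # True while the 30-second window is valid
```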
@@ -1,3 +1,5 @@
+from getpass import getpass
+
 from django.core.management.base import BaseCommand

 from accounts.models import User
@@ -17,7 +19,13 @@ class Command(BaseCommand):
             self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
             return

-        passwd = input("Enter new password: ")
-        user.set_password(passwd)
+        pass1, pass2 = "foo", "bar"
+        while pass1 != pass2:
+            pass1 = getpass()
+            pass2 = getpass(prompt="Confirm Password:")
+            if pass1 != pass2:
+                self.stdout.write(self.style.ERROR("Passwords don't match"))
+
+        user.set_password(pass1)
         user.save()
         self.stdout.write(self.style.SUCCESS(f"Password for {username} was reset!"))
@@ -185,7 +185,6 @@ class Role(BaseAuditModel):
         return self.name

     def save(self, *args, **kwargs) -> None:
-
         # delete cache on save
         cache.delete(f"{ROLE_CACHE_PREFIX}{self.name}")
         super(BaseAuditModel, self).save(*args, **kwargs)
@@ -7,32 +7,31 @@ class AccountsPerms(permissions.BasePermission):
     def has_permission(self, r, view) -> bool:
         if r.method == "GET":
             return _has_perm(r, "can_list_accounts")
-        else:

-            # allow users to reset their own password/2fa see issue #686
-            base_path = "/accounts/users/"
-            paths = ["reset/", "reset_totp/"]
+        # allow users to reset their own password/2fa see issue #686
+        base_path = "/accounts/users/"
+        paths = ("reset/", "reset_totp/")

-            if r.path in [base_path + i for i in paths]:
-                from accounts.models import User
+        if r.path in [base_path + i for i in paths]:
+            from accounts.models import User

-                try:
-                    user = User.objects.get(pk=r.data["id"])
-                except User.DoesNotExist:
-                    pass
-                else:
-                    if user == r.user:
-                        return True
+            try:
+                user = User.objects.get(pk=r.data["id"])
+            except User.DoesNotExist:
+                pass
+            else:
+                if user == r.user:
+                    return True

-            return _has_perm(r, "can_manage_accounts")
+        return _has_perm(r, "can_manage_accounts")


 class RolesPerms(permissions.BasePermission):
     def has_permission(self, r, view) -> bool:
         if r.method == "GET":
             return _has_perm(r, "can_list_roles")
-        else:
-            return _has_perm(r, "can_manage_roles")
+
+        return _has_perm(r, "can_manage_roles")


 class APIKeyPerms(permissions.BasePermission):
@@ -45,7 +45,6 @@ class UserSerializer(ModelSerializer):


 class TOTPSetupSerializer(ModelSerializer):
-
     qr_url = SerializerMethodField()

     class Meta:
@@ -80,7 +79,6 @@ class RoleAuditSerializer(ModelSerializer):


 class APIKeySerializer(ModelSerializer):
-
     username = ReadOnlyField(source="user.username")

     class Meta:
@@ -197,7 +197,7 @@ class GetUpdateDeleteUser(TacticalTestCase):
         r = self.client.delete(url)
         self.assertEqual(r.status_code, 200)

-        url = f"/accounts/893452/users/"
+        url = "/accounts/893452/users/"
         r = self.client.delete(url)
         self.assertEqual(r.status_code, 404)
@@ -297,6 +297,27 @@ class TestUserAction(TacticalTestCase):
         self.check_not_authenticated("patch", url)


+class TestUserReset(TacticalTestCase):
+    def setUp(self):
+        self.authenticate()
+        self.setup_coresettings()
+
+    def test_reset_pw(self):
+        url = "/accounts/resetpw/"
+        data = {"password": "superSekret123456"}
+        r = self.client.put(url, data, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        self.check_not_authenticated("put", url)
+
+    def test_reset_2fa(self):
+        url = "/accounts/reset2fa/"
+        r = self.client.put(url)
+        self.assertEqual(r.status_code, 200)
+
+        self.check_not_authenticated("put", url)
+
+
 class TestAPIKeyViews(TacticalTestCase):
     def setUp(self):
         self.setup_coresettings()
@@ -13,4 +13,6 @@ urlpatterns = [
     path("roles/<int:pk>/", views.GetUpdateDeleteRole.as_view()),
     path("apikeys/", views.GetAddAPIKeys.as_view()),
     path("apikeys/<int:pk>/", views.GetUpdateDeleteAPIKey.as_view()),
+    path("resetpw/", views.ResetPass.as_view()),
+    path("reset2fa/", views.Reset2FA.as_view()),
 ]
@@ -26,11 +26,9 @@ from accounts.utils import is_root_user


 class CheckCreds(KnoxLoginView):
-
     permission_classes = (AllowAny,)

     def post(self, request, format=None):
-
         # check credentials
         serializer = AuthTokenSerializer(data=request.data)
         if not serializer.is_valid():
@@ -55,7 +53,6 @@ class CheckCreds(KnoxLoginView):


 class LoginView(KnoxLoginView):
-
     permission_classes = (AllowAny,)

     def post(self, request, format=None):
@@ -169,6 +166,7 @@ class GetUpdateDeleteUser(APIView):

 class UserActions(APIView):
+    permission_classes = [IsAuthenticated, AccountsPerms]

     # reset password
     def post(self, request):
         user = get_object_or_404(User, pk=request.data["id"])
@@ -195,10 +193,8 @@


 class TOTPSetup(APIView):
-
     # totp setup
     def post(self, request):
-
         user = request.user
         if not user.totp_key:
             code = pyotp.random_base32()
@@ -267,7 +263,7 @@ class GetAddAPIKeys(APIView):
         request.data["key"] = get_random_string(length=32).upper()
         serializer = APIKeySerializer(data=request.data)
         serializer.is_valid(raise_exception=True)
-        obj = serializer.save()
+        serializer.save()
         return Response("The API Key was added")


@@ -290,3 +286,23 @@ class GetUpdateDeleteAPIKey(APIView):
         apikey = get_object_or_404(APIKey, pk=pk)
         apikey.delete()
         return Response("The API Key was deleted")
+
+
+class ResetPass(APIView):
+    permission_classes = [IsAuthenticated]
+
+    def put(self, request):
+        user = request.user
+        user.set_password(request.data["password"])
+        user.save()
+        return Response("Password was reset.")
+
+
+class Reset2FA(APIView):
+    permission_classes = [IsAuthenticated]
+
+    def put(self, request):
+        user = request.user
+        user.totp_key = ""
+        user.save()
+        return Response("2FA was reset. Log out and back in to setup.")
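The new ResetPass and Reset2FA views are wired to /accounts/resetpw/ and /accounts/reset2fa/ (see the urls.py and test hunks above). A minimal client-side sketch, assuming knox token auth and a placeholder base URL:

```python
# Hedged sketch of calling the new self-service endpoints; the base URL and token are
# placeholders, while the paths, method, and JSON body come from the diff above.
import requests

BASE = "https://api.example.com"
HEADERS = {"Authorization": "Token <knox-token>"}

# change the logged-in user's own password
r = requests.put(f"{BASE}/accounts/resetpw/", json={"password": "superSekret123456"}, headers=HEADERS)
print(r.status_code)  # 200 -> "Password was reset."

# clear the logged-in user's TOTP key so 2FA can be re-enrolled after the next login
r = requests.put(f"{BASE}/accounts/reset2fa/", headers=HEADERS)
print(r.status_code)  # 200 -> "2FA was reset. Log out and back in to setup."
```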
@@ -9,7 +9,6 @@ from tacticalrmm.permissions import _has_perm_on_agent

 class SendCMD(AsyncJsonWebsocketConsumer):
     async def connect(self):
-
         self.user = self.scope["user"]

         if isinstance(self.user, AnonymousUser):
@@ -10,7 +10,7 @@ from tacticalrmm.utils import reload_nats


 class Command(BaseCommand):
-    help = "Delete old agents"
+    help = "Delete multiple agents based on criteria"

     def add_arguments(self, parser):
         parser.add_argument(
@@ -23,6 +23,16 @@ class Command(BaseCommand):
             type=str,
             help="Delete agents that equal to or less than this version",
         )
+        parser.add_argument(
+            "--site",
+            type=str,
+            help="Delete agents that belong to the specified site",
+        )
+        parser.add_argument(
+            "--client",
+            type=str,
+            help="Delete agents that belong to the specified client",
+        )
         parser.add_argument(
             "--delete",
             action="store_true",
@@ -32,11 +42,15 @@ class Command(BaseCommand):
     def handle(self, *args, **kwargs):
         days = kwargs["days"]
         agentver = kwargs["agentver"]
+        site = kwargs["site"]
+        client = kwargs["client"]
         delete = kwargs["delete"]

-        if not days and not agentver:
+        if not days and not agentver and not site and not client:
             self.stdout.write(
-                self.style.ERROR("Must have at least one parameter: days or agentver")
+                self.style.ERROR(
+                    "Must have at least one parameter: days, agentver, site, or client"
+                )
             )
             return

@@ -50,6 +64,12 @@ class Command(BaseCommand):
         if agentver:
             agents = [i for i in q if pyver.parse(i.version) <= pyver.parse(agentver)]

+        if site:
+            agents = [i for i in q if i.site.name == site]
+
+        if client:
+            agents = [i for i in q if i.client.name == client]
+
         if not agents:
             self.stdout.write(self.style.ERROR("No agents matched"))
             return
@@ -64,7 +84,7 @@ class Command(BaseCommand):
             try:
                 agent.delete()
             except Exception as e:
-                err = f"Failed to delete agent {agent.hostname}: {str(e)}"
+                err = f"Failed to delete agent {agent.hostname}: {e}"
                 self.stdout.write(self.style.ERROR(err))
             else:
                 deleted_count += 1
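A hedged usage sketch for the extended cleanup command: the command name "delete_agents" and the example values are assumptions, while the options map to the arguments added above.

```python
# Hypothetical invocation via Django's call_command; option names match the parser
# arguments shown in the diff (--days is assumed to exist from kwargs["days"]).
from django.core.management import call_command

# report agents in one site running an old agent version (no --delete, so presumably
# the command only lists the matches)
call_command("delete_agents", agentver="2.4.0", site="Example Site")

# actually delete agents for one client that have not checked in for 90+ days
call_command("delete_agents", days=90, client="Example Client", delete=True)
```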
@@ -5,14 +5,13 @@ from django.core.management.base import BaseCommand
 from django.utils import timezone as djangotime

 from agents.models import Agent
-from core.tasks import cache_db_fields_task, handle_resolved_stuff
+from core.tasks import cache_db_fields_task


 class Command(BaseCommand):
     help = "stuff for demo site in cron"

     def handle(self, *args, **kwargs):
-
         random_dates = []
         now = djangotime.now()

@@ -30,4 +29,3 @@ class Command(BaseCommand):
             agent.save(update_fields=["last_seen"])

         cache_db_fields_task()
-        handle_resolved_stuff()
@@ -27,6 +27,7 @@ from tacticalrmm.constants import (
     EvtLogFailWhen,
     EvtLogNames,
     EvtLogTypes,
+    GoArch,
     PAAction,
     ScriptShell,
     TaskSyncStatus,
@@ -47,10 +48,12 @@ from tacticalrmm.demo_data import (
     temp_dir_stdout,
     wmi_deb,
     wmi_pi,
+    wmi_mac,
+    disks_mac,
 )
 from winupdate.models import WinUpdate, WinUpdatePolicy

-AGENTS_TO_GENERATE = 20
+AGENTS_TO_GENERATE = 250

 SVCS = settings.BASE_DIR.joinpath("tacticalrmm/test_data/winsvcs.json")
 WMI_1 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/wmi1.json")
@@ -72,7 +75,6 @@ class Command(BaseCommand):
         return "".join(random.choice(chars) for _ in range(length))

     def handle(self, *args, **kwargs) -> None:
-
         user = User.objects.first()
         if user:
             user.totp_key = "ABSA234234"
@@ -177,6 +179,8 @@ class Command(BaseCommand):
             "WSUS",
             "DESKTOP-12345",
             "LAPTOP-55443",
+            "db-aws-01",
+            "Karens-MacBook-Air.local",
         )
         descriptions = ("Bob's computer", "Primary DC", "File Server", "Karen's Laptop")
         modes = AgentMonType.values
@@ -194,6 +198,7 @@ class Command(BaseCommand):

         linux_deb_os = "Debian 11.2 x86_64 5.10.0-11-amd64"
         linux_pi_os = "Raspbian 11.2 armv7l 5.10.92-v7+"
+        mac_os = "Darwin 12.5.1 arm64 21.6.0"

         public_ips = ("65.234.22.4", "74.123.43.5", "44.21.134.45")

@@ -289,7 +294,6 @@ class Command(BaseCommand):
         show_tmp_dir_script.save()

         for count_agents in range(AGENTS_TO_GENERATE):
-
             client = random.choice(clients)

             if client == clients[0]:
@@ -313,18 +317,25 @@ class Command(BaseCommand):
                     mode = AgentMonType.SERVER
                     # pi arm
                     if plat_pick == 7:
-                        agent.goarch = "arm"
+                        agent.goarch = GoArch.ARM32
                         agent.wmi_detail = wmi_pi
                         agent.disks = disks_linux_pi
                         agent.operating_system = linux_pi_os
                     else:
-                        agent.goarch = "amd64"
+                        agent.goarch = GoArch.AMD64
                         agent.wmi_detail = wmi_deb
                         agent.disks = disks_linux_deb
                         agent.operating_system = linux_deb_os
+                elif plat_pick in (4, 14):
+                    agent.plat = AgentPlat.DARWIN
+                    mode = random.choice([AgentMonType.SERVER, AgentMonType.WORKSTATION])
+                    agent.goarch = GoArch.ARM64
+                    agent.wmi_detail = wmi_mac
+                    agent.disks = disks_mac
+                    agent.operating_system = mac_os
                 else:
                     agent.plat = AgentPlat.WINDOWS
-                    agent.goarch = "amd64"
+                    agent.goarch = GoArch.AMD64
                     mode = random.choice(modes)
                     agent.wmi_detail = random.choice(wmi_details)
                     agent.services = services
@@ -334,8 +345,8 @@ class Command(BaseCommand):
                 else:
                     agent.operating_system = random.choice(op_systems_workstations)

-                agent.hostname = random.choice(hostnames)
                 agent.version = settings.LATEST_AGENT_VER
+                agent.hostname = random.choice(hostnames)
                 agent.site = Site.objects.get(name=site)
                 agent.agent_id = self.rand_string(40)
                 agent.description = random.choice(descriptions)
@@ -810,7 +821,6 @@ class Command(BaseCommand):
             pick = random.randint(1, 10)

             if pick == 5 or pick == 3:
-
                 reboot_time = django_now + djangotime.timedelta(
                     minutes=random.randint(1000, 500000)
                 )
631 api/tacticalrmm/agents/migrations/0056_alter_agent_time_zone.py Normal file
@@ -0,0 +1,631 @@
|
||||
# Generated by Django 4.1.7 on 2023-02-28 22:14
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("agents", "0055_alter_agent_time_zone"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="agent",
|
||||
name="time_zone",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
choices=[
|
||||
("Africa/Abidjan", "Africa/Abidjan"),
|
||||
("Africa/Accra", "Africa/Accra"),
|
||||
("Africa/Addis_Ababa", "Africa/Addis_Ababa"),
|
||||
("Africa/Algiers", "Africa/Algiers"),
|
||||
("Africa/Asmara", "Africa/Asmara"),
|
||||
("Africa/Asmera", "Africa/Asmera"),
|
||||
("Africa/Bamako", "Africa/Bamako"),
|
||||
("Africa/Bangui", "Africa/Bangui"),
|
||||
("Africa/Banjul", "Africa/Banjul"),
|
||||
("Africa/Bissau", "Africa/Bissau"),
|
||||
("Africa/Blantyre", "Africa/Blantyre"),
|
||||
("Africa/Brazzaville", "Africa/Brazzaville"),
|
||||
("Africa/Bujumbura", "Africa/Bujumbura"),
|
||||
("Africa/Cairo", "Africa/Cairo"),
|
||||
("Africa/Casablanca", "Africa/Casablanca"),
|
||||
("Africa/Ceuta", "Africa/Ceuta"),
|
||||
("Africa/Conakry", "Africa/Conakry"),
|
||||
("Africa/Dakar", "Africa/Dakar"),
|
||||
("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"),
|
||||
("Africa/Djibouti", "Africa/Djibouti"),
|
||||
("Africa/Douala", "Africa/Douala"),
|
||||
("Africa/El_Aaiun", "Africa/El_Aaiun"),
|
||||
("Africa/Freetown", "Africa/Freetown"),
|
||||
("Africa/Gaborone", "Africa/Gaborone"),
|
||||
("Africa/Harare", "Africa/Harare"),
|
||||
("Africa/Johannesburg", "Africa/Johannesburg"),
|
||||
("Africa/Juba", "Africa/Juba"),
|
||||
("Africa/Kampala", "Africa/Kampala"),
|
||||
("Africa/Khartoum", "Africa/Khartoum"),
|
||||
("Africa/Kigali", "Africa/Kigali"),
|
||||
("Africa/Kinshasa", "Africa/Kinshasa"),
|
||||
("Africa/Lagos", "Africa/Lagos"),
|
||||
("Africa/Libreville", "Africa/Libreville"),
|
||||
("Africa/Lome", "Africa/Lome"),
|
||||
("Africa/Luanda", "Africa/Luanda"),
|
||||
("Africa/Lubumbashi", "Africa/Lubumbashi"),
|
||||
("Africa/Lusaka", "Africa/Lusaka"),
|
||||
("Africa/Malabo", "Africa/Malabo"),
|
||||
("Africa/Maputo", "Africa/Maputo"),
|
||||
("Africa/Maseru", "Africa/Maseru"),
|
||||
("Africa/Mbabane", "Africa/Mbabane"),
|
||||
("Africa/Mogadishu", "Africa/Mogadishu"),
|
||||
("Africa/Monrovia", "Africa/Monrovia"),
|
||||
("Africa/Nairobi", "Africa/Nairobi"),
|
||||
("Africa/Ndjamena", "Africa/Ndjamena"),
|
||||
("Africa/Niamey", "Africa/Niamey"),
|
||||
("Africa/Nouakchott", "Africa/Nouakchott"),
|
||||
("Africa/Ouagadougou", "Africa/Ouagadougou"),
|
||||
("Africa/Porto-Novo", "Africa/Porto-Novo"),
|
||||
("Africa/Sao_Tome", "Africa/Sao_Tome"),
|
||||
("Africa/Timbuktu", "Africa/Timbuktu"),
|
||||
("Africa/Tripoli", "Africa/Tripoli"),
|
||||
("Africa/Tunis", "Africa/Tunis"),
|
||||
("Africa/Windhoek", "Africa/Windhoek"),
|
||||
("America/Adak", "America/Adak"),
|
||||
("America/Anchorage", "America/Anchorage"),
|
||||
("America/Anguilla", "America/Anguilla"),
|
||||
("America/Antigua", "America/Antigua"),
|
||||
("America/Araguaina", "America/Araguaina"),
|
||||
(
|
||||
"America/Argentina/Buenos_Aires",
|
||||
"America/Argentina/Buenos_Aires",
|
||||
),
|
||||
("America/Argentina/Catamarca", "America/Argentina/Catamarca"),
|
||||
(
|
||||
"America/Argentina/ComodRivadavia",
|
||||
"America/Argentina/ComodRivadavia",
|
||||
),
|
||||
("America/Argentina/Cordoba", "America/Argentina/Cordoba"),
|
||||
("America/Argentina/Jujuy", "America/Argentina/Jujuy"),
|
||||
("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"),
|
||||
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
|
||||
(
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
),
|
||||
("America/Argentina/Salta", "America/Argentina/Salta"),
|
||||
("America/Argentina/San_Juan", "America/Argentina/San_Juan"),
|
||||
("America/Argentina/San_Luis", "America/Argentina/San_Luis"),
|
||||
("America/Argentina/Tucuman", "America/Argentina/Tucuman"),
|
||||
("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"),
|
||||
("America/Aruba", "America/Aruba"),
|
||||
("America/Asuncion", "America/Asuncion"),
|
||||
("America/Atikokan", "America/Atikokan"),
|
||||
("America/Atka", "America/Atka"),
|
||||
("America/Bahia", "America/Bahia"),
|
||||
("America/Bahia_Banderas", "America/Bahia_Banderas"),
|
||||
("America/Barbados", "America/Barbados"),
|
||||
("America/Belem", "America/Belem"),
|
||||
("America/Belize", "America/Belize"),
|
||||
("America/Blanc-Sablon", "America/Blanc-Sablon"),
|
||||
("America/Boa_Vista", "America/Boa_Vista"),
|
||||
("America/Bogota", "America/Bogota"),
|
||||
("America/Boise", "America/Boise"),
|
||||
("America/Buenos_Aires", "America/Buenos_Aires"),
|
||||
("America/Cambridge_Bay", "America/Cambridge_Bay"),
|
||||
("America/Campo_Grande", "America/Campo_Grande"),
|
||||
("America/Cancun", "America/Cancun"),
|
||||
("America/Caracas", "America/Caracas"),
|
||||
("America/Catamarca", "America/Catamarca"),
|
||||
("America/Cayenne", "America/Cayenne"),
|
||||
("America/Cayman", "America/Cayman"),
|
||||
("America/Chicago", "America/Chicago"),
|
||||
("America/Chihuahua", "America/Chihuahua"),
|
||||
("America/Ciudad_Juarez", "America/Ciudad_Juarez"),
|
||||
("America/Coral_Harbour", "America/Coral_Harbour"),
|
||||
("America/Cordoba", "America/Cordoba"),
|
||||
("America/Costa_Rica", "America/Costa_Rica"),
|
||||
("America/Creston", "America/Creston"),
|
||||
("America/Cuiaba", "America/Cuiaba"),
|
||||
("America/Curacao", "America/Curacao"),
|
||||
("America/Danmarkshavn", "America/Danmarkshavn"),
|
||||
("America/Dawson", "America/Dawson"),
|
||||
("America/Dawson_Creek", "America/Dawson_Creek"),
|
||||
("America/Denver", "America/Denver"),
|
||||
("America/Detroit", "America/Detroit"),
|
||||
("America/Dominica", "America/Dominica"),
|
||||
("America/Edmonton", "America/Edmonton"),
|
||||
("America/Eirunepe", "America/Eirunepe"),
|
||||
("America/El_Salvador", "America/El_Salvador"),
|
||||
("America/Ensenada", "America/Ensenada"),
|
||||
("America/Fort_Nelson", "America/Fort_Nelson"),
|
||||
("America/Fort_Wayne", "America/Fort_Wayne"),
|
||||
("America/Fortaleza", "America/Fortaleza"),
|
||||
("America/Glace_Bay", "America/Glace_Bay"),
|
||||
("America/Godthab", "America/Godthab"),
|
||||
("America/Goose_Bay", "America/Goose_Bay"),
|
||||
("America/Grand_Turk", "America/Grand_Turk"),
|
||||
("America/Grenada", "America/Grenada"),
|
||||
("America/Guadeloupe", "America/Guadeloupe"),
|
||||
("America/Guatemala", "America/Guatemala"),
|
||||
("America/Guayaquil", "America/Guayaquil"),
|
||||
("America/Guyana", "America/Guyana"),
|
||||
("America/Halifax", "America/Halifax"),
|
||||
("America/Havana", "America/Havana"),
|
||||
("America/Hermosillo", "America/Hermosillo"),
|
||||
("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"),
|
||||
("America/Indiana/Knox", "America/Indiana/Knox"),
|
||||
("America/Indiana/Marengo", "America/Indiana/Marengo"),
|
||||
("America/Indiana/Petersburg", "America/Indiana/Petersburg"),
|
||||
("America/Indiana/Tell_City", "America/Indiana/Tell_City"),
|
||||
("America/Indiana/Vevay", "America/Indiana/Vevay"),
|
||||
("America/Indiana/Vincennes", "America/Indiana/Vincennes"),
|
||||
("America/Indiana/Winamac", "America/Indiana/Winamac"),
|
||||
("America/Indianapolis", "America/Indianapolis"),
|
||||
("America/Inuvik", "America/Inuvik"),
|
||||
("America/Iqaluit", "America/Iqaluit"),
|
||||
("America/Jamaica", "America/Jamaica"),
|
||||
("America/Jujuy", "America/Jujuy"),
|
||||
("America/Juneau", "America/Juneau"),
|
||||
("America/Kentucky/Louisville", "America/Kentucky/Louisville"),
|
||||
("America/Kentucky/Monticello", "America/Kentucky/Monticello"),
|
||||
("America/Knox_IN", "America/Knox_IN"),
|
||||
("America/Kralendijk", "America/Kralendijk"),
|
||||
("America/La_Paz", "America/La_Paz"),
|
||||
("America/Lima", "America/Lima"),
|
||||
("America/Los_Angeles", "America/Los_Angeles"),
|
||||
("America/Louisville", "America/Louisville"),
|
||||
("America/Lower_Princes", "America/Lower_Princes"),
|
||||
("America/Maceio", "America/Maceio"),
|
||||
("America/Managua", "America/Managua"),
|
||||
("America/Manaus", "America/Manaus"),
|
||||
("America/Marigot", "America/Marigot"),
|
||||
("America/Martinique", "America/Martinique"),
|
||||
("America/Matamoros", "America/Matamoros"),
|
||||
("America/Mazatlan", "America/Mazatlan"),
|
||||
("America/Mendoza", "America/Mendoza"),
|
||||
("America/Menominee", "America/Menominee"),
|
||||
("America/Merida", "America/Merida"),
|
||||
("America/Metlakatla", "America/Metlakatla"),
|
||||
("America/Mexico_City", "America/Mexico_City"),
|
||||
("America/Miquelon", "America/Miquelon"),
|
||||
("America/Moncton", "America/Moncton"),
|
||||
("America/Monterrey", "America/Monterrey"),
|
||||
("America/Montevideo", "America/Montevideo"),
|
||||
("America/Montreal", "America/Montreal"),
|
||||
("America/Montserrat", "America/Montserrat"),
|
||||
("America/Nassau", "America/Nassau"),
|
||||
("America/New_York", "America/New_York"),
|
||||
("America/Nipigon", "America/Nipigon"),
|
||||
("America/Nome", "America/Nome"),
|
||||
("America/Noronha", "America/Noronha"),
|
||||
("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"),
|
||||
("America/North_Dakota/Center", "America/North_Dakota/Center"),
|
||||
(
|
||||
"America/North_Dakota/New_Salem",
|
||||
"America/North_Dakota/New_Salem",
|
||||
),
|
||||
("America/Nuuk", "America/Nuuk"),
|
||||
("America/Ojinaga", "America/Ojinaga"),
|
||||
("America/Panama", "America/Panama"),
|
||||
("America/Pangnirtung", "America/Pangnirtung"),
|
||||
("America/Paramaribo", "America/Paramaribo"),
|
||||
("America/Phoenix", "America/Phoenix"),
|
||||
("America/Port-au-Prince", "America/Port-au-Prince"),
|
||||
("America/Port_of_Spain", "America/Port_of_Spain"),
|
||||
("America/Porto_Acre", "America/Porto_Acre"),
|
||||
("America/Porto_Velho", "America/Porto_Velho"),
|
||||
("America/Puerto_Rico", "America/Puerto_Rico"),
|
||||
("America/Punta_Arenas", "America/Punta_Arenas"),
|
||||
("America/Rainy_River", "America/Rainy_River"),
|
||||
("America/Rankin_Inlet", "America/Rankin_Inlet"),
|
||||
("America/Recife", "America/Recife"),
|
||||
("America/Regina", "America/Regina"),
|
||||
("America/Resolute", "America/Resolute"),
|
||||
("America/Rio_Branco", "America/Rio_Branco"),
|
||||
("America/Rosario", "America/Rosario"),
|
||||
("America/Santa_Isabel", "America/Santa_Isabel"),
|
||||
("America/Santarem", "America/Santarem"),
|
||||
("America/Santiago", "America/Santiago"),
|
||||
("America/Santo_Domingo", "America/Santo_Domingo"),
|
||||
("America/Sao_Paulo", "America/Sao_Paulo"),
|
||||
("America/Scoresbysund", "America/Scoresbysund"),
|
||||
("America/Shiprock", "America/Shiprock"),
|
||||
("America/Sitka", "America/Sitka"),
|
||||
("America/St_Barthelemy", "America/St_Barthelemy"),
|
||||
("America/St_Johns", "America/St_Johns"),
|
||||
("America/St_Kitts", "America/St_Kitts"),
|
||||
("America/St_Lucia", "America/St_Lucia"),
|
||||
("America/St_Thomas", "America/St_Thomas"),
|
||||
("America/St_Vincent", "America/St_Vincent"),
|
||||
("America/Swift_Current", "America/Swift_Current"),
|
||||
("America/Tegucigalpa", "America/Tegucigalpa"),
|
||||
("America/Thule", "America/Thule"),
|
||||
("America/Thunder_Bay", "America/Thunder_Bay"),
|
||||
("America/Tijuana", "America/Tijuana"),
|
||||
("America/Toronto", "America/Toronto"),
|
||||
("America/Tortola", "America/Tortola"),
|
||||
("America/Vancouver", "America/Vancouver"),
|
||||
("America/Virgin", "America/Virgin"),
|
||||
("America/Whitehorse", "America/Whitehorse"),
|
||||
("America/Winnipeg", "America/Winnipeg"),
|
||||
("America/Yakutat", "America/Yakutat"),
|
||||
("America/Yellowknife", "America/Yellowknife"),
|
||||
("Antarctica/Casey", "Antarctica/Casey"),
|
||||
("Antarctica/Davis", "Antarctica/Davis"),
|
||||
("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"),
|
||||
("Antarctica/Macquarie", "Antarctica/Macquarie"),
|
||||
("Antarctica/Mawson", "Antarctica/Mawson"),
|
||||
("Antarctica/McMurdo", "Antarctica/McMurdo"),
|
||||
("Antarctica/Palmer", "Antarctica/Palmer"),
|
||||
("Antarctica/Rothera", "Antarctica/Rothera"),
|
||||
("Antarctica/South_Pole", "Antarctica/South_Pole"),
|
||||
("Antarctica/Syowa", "Antarctica/Syowa"),
|
||||
("Antarctica/Troll", "Antarctica/Troll"),
|
||||
("Antarctica/Vostok", "Antarctica/Vostok"),
|
||||
("Arctic/Longyearbyen", "Arctic/Longyearbyen"),
|
||||
("Asia/Aden", "Asia/Aden"),
|
||||
("Asia/Almaty", "Asia/Almaty"),
|
||||
("Asia/Amman", "Asia/Amman"),
|
||||
("Asia/Anadyr", "Asia/Anadyr"),
|
||||
("Asia/Aqtau", "Asia/Aqtau"),
|
||||
("Asia/Aqtobe", "Asia/Aqtobe"),
|
||||
("Asia/Ashgabat", "Asia/Ashgabat"),
|
||||
("Asia/Ashkhabad", "Asia/Ashkhabad"),
|
||||
("Asia/Atyrau", "Asia/Atyrau"),
|
||||
("Asia/Baghdad", "Asia/Baghdad"),
|
||||
("Asia/Bahrain", "Asia/Bahrain"),
|
||||
("Asia/Baku", "Asia/Baku"),
|
||||
("Asia/Bangkok", "Asia/Bangkok"),
|
||||
("Asia/Barnaul", "Asia/Barnaul"),
|
||||
("Asia/Beirut", "Asia/Beirut"),
|
||||
("Asia/Bishkek", "Asia/Bishkek"),
|
||||
("Asia/Brunei", "Asia/Brunei"),
|
||||
("Asia/Calcutta", "Asia/Calcutta"),
|
||||
("Asia/Chita", "Asia/Chita"),
|
||||
("Asia/Choibalsan", "Asia/Choibalsan"),
|
||||
("Asia/Chongqing", "Asia/Chongqing"),
|
||||
("Asia/Chungking", "Asia/Chungking"),
|
||||
("Asia/Colombo", "Asia/Colombo"),
|
||||
("Asia/Dacca", "Asia/Dacca"),
|
||||
("Asia/Damascus", "Asia/Damascus"),
|
||||
("Asia/Dhaka", "Asia/Dhaka"),
|
||||
("Asia/Dili", "Asia/Dili"),
|
||||
("Asia/Dubai", "Asia/Dubai"),
|
||||
("Asia/Dushanbe", "Asia/Dushanbe"),
|
||||
("Asia/Famagusta", "Asia/Famagusta"),
|
||||
("Asia/Gaza", "Asia/Gaza"),
|
||||
("Asia/Harbin", "Asia/Harbin"),
|
||||
("Asia/Hebron", "Asia/Hebron"),
|
||||
("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"),
|
||||
("Asia/Hong_Kong", "Asia/Hong_Kong"),
|
||||
("Asia/Hovd", "Asia/Hovd"),
|
||||
("Asia/Irkutsk", "Asia/Irkutsk"),
|
||||
("Asia/Istanbul", "Asia/Istanbul"),
|
||||
("Asia/Jakarta", "Asia/Jakarta"),
|
||||
("Asia/Jayapura", "Asia/Jayapura"),
|
||||
("Asia/Jerusalem", "Asia/Jerusalem"),
|
||||
("Asia/Kabul", "Asia/Kabul"),
|
||||
("Asia/Kamchatka", "Asia/Kamchatka"),
|
||||
("Asia/Karachi", "Asia/Karachi"),
|
||||
("Asia/Kashgar", "Asia/Kashgar"),
|
||||
("Asia/Kathmandu", "Asia/Kathmandu"),
|
||||
("Asia/Katmandu", "Asia/Katmandu"),
|
||||
("Asia/Khandyga", "Asia/Khandyga"),
|
||||
("Asia/Kolkata", "Asia/Kolkata"),
|
||||
("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"),
|
||||
("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"),
|
||||
("Asia/Kuching", "Asia/Kuching"),
|
||||
("Asia/Kuwait", "Asia/Kuwait"),
|
||||
("Asia/Macao", "Asia/Macao"),
|
||||
("Asia/Macau", "Asia/Macau"),
|
||||
("Asia/Magadan", "Asia/Magadan"),
|
||||
("Asia/Makassar", "Asia/Makassar"),
|
||||
("Asia/Manila", "Asia/Manila"),
|
||||
("Asia/Muscat", "Asia/Muscat"),
|
||||
("Asia/Nicosia", "Asia/Nicosia"),
|
||||
("Asia/Novokuznetsk", "Asia/Novokuznetsk"),
|
||||
("Asia/Novosibirsk", "Asia/Novosibirsk"),
|
||||
("Asia/Omsk", "Asia/Omsk"),
|
||||
("Asia/Oral", "Asia/Oral"),
|
||||
("Asia/Phnom_Penh", "Asia/Phnom_Penh"),
|
||||
("Asia/Pontianak", "Asia/Pontianak"),
|
||||
("Asia/Pyongyang", "Asia/Pyongyang"),
|
||||
("Asia/Qatar", "Asia/Qatar"),
|
||||
("Asia/Qostanay", "Asia/Qostanay"),
|
||||
("Asia/Qyzylorda", "Asia/Qyzylorda"),
|
||||
("Asia/Rangoon", "Asia/Rangoon"),
|
||||
("Asia/Riyadh", "Asia/Riyadh"),
|
||||
("Asia/Saigon", "Asia/Saigon"),
|
||||
("Asia/Sakhalin", "Asia/Sakhalin"),
|
||||
("Asia/Samarkand", "Asia/Samarkand"),
|
||||
("Asia/Seoul", "Asia/Seoul"),
|
||||
("Asia/Shanghai", "Asia/Shanghai"),
|
||||
("Asia/Singapore", "Asia/Singapore"),
|
||||
("Asia/Srednekolymsk", "Asia/Srednekolymsk"),
|
||||
("Asia/Taipei", "Asia/Taipei"),
|
||||
("Asia/Tashkent", "Asia/Tashkent"),
|
||||
("Asia/Tbilisi", "Asia/Tbilisi"),
|
||||
("Asia/Tehran", "Asia/Tehran"),
|
||||
("Asia/Tel_Aviv", "Asia/Tel_Aviv"),
|
||||
("Asia/Thimbu", "Asia/Thimbu"),
|
||||
("Asia/Thimphu", "Asia/Thimphu"),
|
||||
("Asia/Tokyo", "Asia/Tokyo"),
|
||||
("Asia/Tomsk", "Asia/Tomsk"),
|
||||
("Asia/Ujung_Pandang", "Asia/Ujung_Pandang"),
|
||||
("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"),
|
||||
("Asia/Ulan_Bator", "Asia/Ulan_Bator"),
|
||||
("Asia/Urumqi", "Asia/Urumqi"),
|
||||
("Asia/Ust-Nera", "Asia/Ust-Nera"),
|
||||
("Asia/Vientiane", "Asia/Vientiane"),
|
||||
("Asia/Vladivostok", "Asia/Vladivostok"),
|
||||
("Asia/Yakutsk", "Asia/Yakutsk"),
|
||||
("Asia/Yangon", "Asia/Yangon"),
|
||||
("Asia/Yekaterinburg", "Asia/Yekaterinburg"),
|
||||
("Asia/Yerevan", "Asia/Yerevan"),
|
||||
("Atlantic/Azores", "Atlantic/Azores"),
|
||||
("Atlantic/Bermuda", "Atlantic/Bermuda"),
|
||||
("Atlantic/Canary", "Atlantic/Canary"),
|
||||
("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"),
|
||||
("Atlantic/Faeroe", "Atlantic/Faeroe"),
|
||||
("Atlantic/Faroe", "Atlantic/Faroe"),
|
||||
("Atlantic/Jan_Mayen", "Atlantic/Jan_Mayen"),
|
||||
("Atlantic/Madeira", "Atlantic/Madeira"),
|
||||
("Atlantic/Reykjavik", "Atlantic/Reykjavik"),
|
||||
("Atlantic/South_Georgia", "Atlantic/South_Georgia"),
|
||||
("Atlantic/St_Helena", "Atlantic/St_Helena"),
|
||||
("Atlantic/Stanley", "Atlantic/Stanley"),
|
||||
("Australia/ACT", "Australia/ACT"),
|
||||
("Australia/Adelaide", "Australia/Adelaide"),
|
||||
("Australia/Brisbane", "Australia/Brisbane"),
|
||||
("Australia/Broken_Hill", "Australia/Broken_Hill"),
|
||||
("Australia/Canberra", "Australia/Canberra"),
|
||||
("Australia/Currie", "Australia/Currie"),
|
||||
("Australia/Darwin", "Australia/Darwin"),
|
||||
("Australia/Eucla", "Australia/Eucla"),
|
||||
("Australia/Hobart", "Australia/Hobart"),
|
||||
("Australia/LHI", "Australia/LHI"),
|
||||
("Australia/Lindeman", "Australia/Lindeman"),
|
||||
("Australia/Lord_Howe", "Australia/Lord_Howe"),
|
||||
("Australia/Melbourne", "Australia/Melbourne"),
|
||||
("Australia/NSW", "Australia/NSW"),
|
||||
("Australia/North", "Australia/North"),
|
||||
("Australia/Perth", "Australia/Perth"),
|
||||
("Australia/Queensland", "Australia/Queensland"),
|
||||
("Australia/South", "Australia/South"),
|
||||
("Australia/Sydney", "Australia/Sydney"),
|
||||
("Australia/Tasmania", "Australia/Tasmania"),
|
||||
("Australia/Victoria", "Australia/Victoria"),
|
||||
("Australia/West", "Australia/West"),
|
||||
("Australia/Yancowinna", "Australia/Yancowinna"),
|
||||
("Brazil/Acre", "Brazil/Acre"),
|
||||
("Brazil/DeNoronha", "Brazil/DeNoronha"),
|
||||
("Brazil/East", "Brazil/East"),
|
||||
("Brazil/West", "Brazil/West"),
|
||||
("CET", "CET"),
|
||||
("CST6CDT", "CST6CDT"),
|
||||
("Canada/Atlantic", "Canada/Atlantic"),
|
||||
("Canada/Central", "Canada/Central"),
|
||||
("Canada/Eastern", "Canada/Eastern"),
|
||||
("Canada/Mountain", "Canada/Mountain"),
|
||||
("Canada/Newfoundland", "Canada/Newfoundland"),
|
||||
("Canada/Pacific", "Canada/Pacific"),
|
||||
("Canada/Saskatchewan", "Canada/Saskatchewan"),
|
||||
("Canada/Yukon", "Canada/Yukon"),
|
||||
("Chile/Continental", "Chile/Continental"),
|
||||
("Chile/EasterIsland", "Chile/EasterIsland"),
|
||||
("Cuba", "Cuba"),
|
||||
("EET", "EET"),
|
||||
("EST", "EST"),
|
||||
("EST5EDT", "EST5EDT"),
|
||||
("Egypt", "Egypt"),
|
||||
("Eire", "Eire"),
|
||||
("Etc/GMT", "Etc/GMT"),
|
||||
("Etc/GMT+0", "Etc/GMT+0"),
|
||||
("Etc/GMT+1", "Etc/GMT+1"),
|
||||
("Etc/GMT+10", "Etc/GMT+10"),
|
||||
("Etc/GMT+11", "Etc/GMT+11"),
|
||||
("Etc/GMT+12", "Etc/GMT+12"),
|
||||
("Etc/GMT+2", "Etc/GMT+2"),
|
||||
("Etc/GMT+3", "Etc/GMT+3"),
|
||||
("Etc/GMT+4", "Etc/GMT+4"),
|
||||
("Etc/GMT+5", "Etc/GMT+5"),
|
||||
("Etc/GMT+6", "Etc/GMT+6"),
|
||||
("Etc/GMT+7", "Etc/GMT+7"),
|
||||
("Etc/GMT+8", "Etc/GMT+8"),
|
||||
("Etc/GMT+9", "Etc/GMT+9"),
|
||||
("Etc/GMT-0", "Etc/GMT-0"),
|
||||
("Etc/GMT-1", "Etc/GMT-1"),
|
||||
("Etc/GMT-10", "Etc/GMT-10"),
|
||||
("Etc/GMT-11", "Etc/GMT-11"),
|
||||
("Etc/GMT-12", "Etc/GMT-12"),
|
||||
("Etc/GMT-13", "Etc/GMT-13"),
|
||||
("Etc/GMT-14", "Etc/GMT-14"),
|
||||
("Etc/GMT-2", "Etc/GMT-2"),
|
||||
("Etc/GMT-3", "Etc/GMT-3"),
|
||||
("Etc/GMT-4", "Etc/GMT-4"),
|
||||
("Etc/GMT-5", "Etc/GMT-5"),
|
||||
("Etc/GMT-6", "Etc/GMT-6"),
|
||||
("Etc/GMT-7", "Etc/GMT-7"),
|
||||
("Etc/GMT-8", "Etc/GMT-8"),
|
||||
("Etc/GMT-9", "Etc/GMT-9"),
|
||||
("Etc/GMT0", "Etc/GMT0"),
|
||||
("Etc/Greenwich", "Etc/Greenwich"),
|
||||
("Etc/UCT", "Etc/UCT"),
|
||||
("Etc/UTC", "Etc/UTC"),
|
||||
("Etc/Universal", "Etc/Universal"),
|
||||
("Etc/Zulu", "Etc/Zulu"),
|
||||
("Europe/Amsterdam", "Europe/Amsterdam"),
|
||||
("Europe/Andorra", "Europe/Andorra"),
|
||||
("Europe/Astrakhan", "Europe/Astrakhan"),
|
||||
("Europe/Athens", "Europe/Athens"),
|
||||
("Europe/Belfast", "Europe/Belfast"),
|
||||
("Europe/Belgrade", "Europe/Belgrade"),
|
||||
("Europe/Berlin", "Europe/Berlin"),
|
||||
("Europe/Bratislava", "Europe/Bratislava"),
|
||||
("Europe/Brussels", "Europe/Brussels"),
|
||||
("Europe/Bucharest", "Europe/Bucharest"),
|
||||
("Europe/Budapest", "Europe/Budapest"),
|
||||
("Europe/Busingen", "Europe/Busingen"),
|
||||
("Europe/Chisinau", "Europe/Chisinau"),
|
||||
("Europe/Copenhagen", "Europe/Copenhagen"),
|
||||
("Europe/Dublin", "Europe/Dublin"),
|
||||
("Europe/Gibraltar", "Europe/Gibraltar"),
|
||||
("Europe/Guernsey", "Europe/Guernsey"),
|
||||
("Europe/Helsinki", "Europe/Helsinki"),
|
||||
("Europe/Isle_of_Man", "Europe/Isle_of_Man"),
|
||||
("Europe/Istanbul", "Europe/Istanbul"),
|
||||
("Europe/Jersey", "Europe/Jersey"),
|
||||
("Europe/Kaliningrad", "Europe/Kaliningrad"),
|
||||
("Europe/Kiev", "Europe/Kiev"),
|
||||
("Europe/Kirov", "Europe/Kirov"),
|
||||
("Europe/Kyiv", "Europe/Kyiv"),
|
||||
("Europe/Lisbon", "Europe/Lisbon"),
|
||||
("Europe/Ljubljana", "Europe/Ljubljana"),
|
||||
("Europe/London", "Europe/London"),
|
||||
("Europe/Luxembourg", "Europe/Luxembourg"),
|
||||
("Europe/Madrid", "Europe/Madrid"),
|
||||
("Europe/Malta", "Europe/Malta"),
|
||||
("Europe/Mariehamn", "Europe/Mariehamn"),
|
||||
("Europe/Minsk", "Europe/Minsk"),
|
||||
("Europe/Monaco", "Europe/Monaco"),
|
||||
("Europe/Moscow", "Europe/Moscow"),
|
||||
("Europe/Nicosia", "Europe/Nicosia"),
|
||||
("Europe/Oslo", "Europe/Oslo"),
|
||||
("Europe/Paris", "Europe/Paris"),
|
||||
("Europe/Podgorica", "Europe/Podgorica"),
|
||||
("Europe/Prague", "Europe/Prague"),
|
||||
("Europe/Riga", "Europe/Riga"),
|
||||
("Europe/Rome", "Europe/Rome"),
|
||||
("Europe/Samara", "Europe/Samara"),
|
||||
("Europe/San_Marino", "Europe/San_Marino"),
|
||||
("Europe/Sarajevo", "Europe/Sarajevo"),
|
||||
("Europe/Saratov", "Europe/Saratov"),
|
||||
("Europe/Simferopol", "Europe/Simferopol"),
|
||||
("Europe/Skopje", "Europe/Skopje"),
|
||||
("Europe/Sofia", "Europe/Sofia"),
|
||||
("Europe/Stockholm", "Europe/Stockholm"),
|
||||
("Europe/Tallinn", "Europe/Tallinn"),
|
||||
("Europe/Tirane", "Europe/Tirane"),
|
||||
("Europe/Tiraspol", "Europe/Tiraspol"),
|
||||
("Europe/Ulyanovsk", "Europe/Ulyanovsk"),
|
||||
("Europe/Uzhgorod", "Europe/Uzhgorod"),
|
||||
("Europe/Vaduz", "Europe/Vaduz"),
|
||||
("Europe/Vatican", "Europe/Vatican"),
|
||||
("Europe/Vienna", "Europe/Vienna"),
|
||||
("Europe/Vilnius", "Europe/Vilnius"),
|
||||
("Europe/Volgograd", "Europe/Volgograd"),
|
||||
("Europe/Warsaw", "Europe/Warsaw"),
|
||||
("Europe/Zagreb", "Europe/Zagreb"),
|
||||
("Europe/Zaporozhye", "Europe/Zaporozhye"),
|
||||
("Europe/Zurich", "Europe/Zurich"),
|
||||
("GB", "GB"),
|
||||
("GB-Eire", "GB-Eire"),
|
||||
("GMT", "GMT"),
|
||||
("GMT+0", "GMT+0"),
|
||||
("GMT-0", "GMT-0"),
|
||||
("GMT0", "GMT0"),
|
||||
("Greenwich", "Greenwich"),
|
||||
("HST", "HST"),
|
||||
("Hongkong", "Hongkong"),
|
||||
("Iceland", "Iceland"),
|
||||
("Indian/Antananarivo", "Indian/Antananarivo"),
|
||||
("Indian/Chagos", "Indian/Chagos"),
|
||||
("Indian/Christmas", "Indian/Christmas"),
|
||||
("Indian/Cocos", "Indian/Cocos"),
|
||||
("Indian/Comoro", "Indian/Comoro"),
|
||||
("Indian/Kerguelen", "Indian/Kerguelen"),
|
||||
("Indian/Mahe", "Indian/Mahe"),
|
||||
("Indian/Maldives", "Indian/Maldives"),
|
||||
("Indian/Mauritius", "Indian/Mauritius"),
|
||||
("Indian/Mayotte", "Indian/Mayotte"),
|
||||
("Indian/Reunion", "Indian/Reunion"),
|
||||
("Iran", "Iran"),
|
||||
("Israel", "Israel"),
|
||||
("Jamaica", "Jamaica"),
|
||||
("Japan", "Japan"),
|
||||
("Kwajalein", "Kwajalein"),
|
||||
("Libya", "Libya"),
|
||||
("MET", "MET"),
|
||||
("MST", "MST"),
|
||||
("MST7MDT", "MST7MDT"),
|
||||
("Mexico/BajaNorte", "Mexico/BajaNorte"),
|
||||
("Mexico/BajaSur", "Mexico/BajaSur"),
|
||||
("Mexico/General", "Mexico/General"),
|
||||
("NZ", "NZ"),
|
||||
("NZ-CHAT", "NZ-CHAT"),
|
||||
("Navajo", "Navajo"),
|
||||
("PRC", "PRC"),
|
||||
("PST8PDT", "PST8PDT"),
|
||||
("Pacific/Apia", "Pacific/Apia"),
|
||||
("Pacific/Auckland", "Pacific/Auckland"),
|
||||
("Pacific/Bougainville", "Pacific/Bougainville"),
|
||||
("Pacific/Chatham", "Pacific/Chatham"),
|
||||
("Pacific/Chuuk", "Pacific/Chuuk"),
|
||||
("Pacific/Easter", "Pacific/Easter"),
|
||||
("Pacific/Efate", "Pacific/Efate"),
|
||||
("Pacific/Enderbury", "Pacific/Enderbury"),
|
||||
("Pacific/Fakaofo", "Pacific/Fakaofo"),
|
||||
("Pacific/Fiji", "Pacific/Fiji"),
|
||||
("Pacific/Funafuti", "Pacific/Funafuti"),
|
||||
("Pacific/Galapagos", "Pacific/Galapagos"),
|
||||
("Pacific/Gambier", "Pacific/Gambier"),
|
||||
("Pacific/Guadalcanal", "Pacific/Guadalcanal"),
|
||||
("Pacific/Guam", "Pacific/Guam"),
|
||||
("Pacific/Honolulu", "Pacific/Honolulu"),
|
||||
("Pacific/Johnston", "Pacific/Johnston"),
|
||||
("Pacific/Kanton", "Pacific/Kanton"),
|
||||
("Pacific/Kiritimati", "Pacific/Kiritimati"),
|
||||
("Pacific/Kosrae", "Pacific/Kosrae"),
|
||||
("Pacific/Kwajalein", "Pacific/Kwajalein"),
|
||||
("Pacific/Majuro", "Pacific/Majuro"),
|
||||
("Pacific/Marquesas", "Pacific/Marquesas"),
|
||||
("Pacific/Midway", "Pacific/Midway"),
|
||||
("Pacific/Nauru", "Pacific/Nauru"),
|
||||
("Pacific/Niue", "Pacific/Niue"),
|
||||
("Pacific/Norfolk", "Pacific/Norfolk"),
|
||||
("Pacific/Noumea", "Pacific/Noumea"),
|
||||
("Pacific/Pago_Pago", "Pacific/Pago_Pago"),
|
||||
("Pacific/Palau", "Pacific/Palau"),
|
||||
("Pacific/Pitcairn", "Pacific/Pitcairn"),
|
||||
("Pacific/Pohnpei", "Pacific/Pohnpei"),
|
||||
("Pacific/Ponape", "Pacific/Ponape"),
|
||||
("Pacific/Port_Moresby", "Pacific/Port_Moresby"),
|
||||
("Pacific/Rarotonga", "Pacific/Rarotonga"),
|
||||
("Pacific/Saipan", "Pacific/Saipan"),
|
||||
("Pacific/Samoa", "Pacific/Samoa"),
|
||||
("Pacific/Tahiti", "Pacific/Tahiti"),
|
||||
("Pacific/Tarawa", "Pacific/Tarawa"),
|
||||
("Pacific/Tongatapu", "Pacific/Tongatapu"),
|
||||
("Pacific/Truk", "Pacific/Truk"),
|
||||
("Pacific/Wake", "Pacific/Wake"),
|
||||
("Pacific/Wallis", "Pacific/Wallis"),
|
||||
("Pacific/Yap", "Pacific/Yap"),
|
||||
("Poland", "Poland"),
|
||||
("Portugal", "Portugal"),
|
||||
("ROC", "ROC"),
|
||||
("ROK", "ROK"),
|
||||
("Singapore", "Singapore"),
|
||||
("Turkey", "Turkey"),
|
||||
("UCT", "UCT"),
|
||||
("US/Alaska", "US/Alaska"),
|
||||
("US/Aleutian", "US/Aleutian"),
|
||||
("US/Arizona", "US/Arizona"),
|
||||
("US/Central", "US/Central"),
|
||||
("US/East-Indiana", "US/East-Indiana"),
|
||||
("US/Eastern", "US/Eastern"),
|
||||
("US/Hawaii", "US/Hawaii"),
|
||||
("US/Indiana-Starke", "US/Indiana-Starke"),
|
||||
("US/Michigan", "US/Michigan"),
|
||||
("US/Mountain", "US/Mountain"),
|
||||
("US/Pacific", "US/Pacific"),
|
||||
("US/Samoa", "US/Samoa"),
|
||||
("UTC", "UTC"),
|
||||
("Universal", "Universal"),
|
||||
("W-SU", "W-SU"),
|
||||
("WET", "WET"),
|
||||
("Zulu", "Zulu"),
|
||||
],
|
||||
max_length=255,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,6 +1,7 @@
 import asyncio
 import re
 from collections import Counter
+from contextlib import suppress
 from distutils.version import LooseVersion
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union, cast

@@ -17,6 +18,7 @@ from nats.errors import TimeoutError
 from packaging import version as pyver

 from agents.utils import get_agent_url
 from checks.models import CheckResult
+from core.models import TZ_CHOICES
 from core.utils import get_core_settings, send_command_with_mesh
 from logs.models import BaseAuditModel, DebugLog, PendingAction
@@ -24,6 +26,7 @@ from tacticalrmm.constants import (
     AGENT_STATUS_OFFLINE,
     AGENT_STATUS_ONLINE,
     AGENT_STATUS_OVERDUE,
+    AGENT_TBL_PEND_ACTION_CNT_CACHE_PREFIX,
     ONLINE_AGENTS,
     AgentHistoryType,
     AgentMonType,
@@ -37,7 +40,7 @@ from tacticalrmm.constants import (
     PAAction,
     PAStatus,
 )
-from tacticalrmm.helpers import get_nats_ports
+from tacticalrmm.helpers import setup_nats_options
 from tacticalrmm.models import PermissionQuerySet

 if TYPE_CHECKING:
@@ -130,8 +133,8 @@ class Agent(BaseAuditModel):
         # return the default timezone unless the timezone is explicity set per agent
         if self.time_zone:
             return self.time_zone
-        else:
-            return get_core_settings().default_time_zone
+
+        return get_core_settings().default_time_zone

     @property
     def is_posix(self) -> bool:
@@ -198,8 +201,9 @@

     @property
     def status(self) -> str:
-        offline = djangotime.now() - djangotime.timedelta(minutes=self.offline_time)
-        overdue = djangotime.now() - djangotime.timedelta(minutes=self.overdue_time)
+        now = djangotime.now()
+        offline = now - djangotime.timedelta(minutes=self.offline_time)
+        overdue = now - djangotime.timedelta(minutes=self.overdue_time)

         if self.last_seen is not None:
             if (self.last_seen < offline) and (self.last_seen > overdue):
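A worked example of the refactored thresholds; the minute values are made up and the online/offline/overdue mapping is inferred from the AGENT_STATUS constants imported above:

```python
# Illustration of the status() cutoffs with illustrative values: offline_time=4 and
# overdue_time=30 minutes, agent last seen 10 minutes ago.
from datetime import datetime, timedelta, timezone

now = datetime.now(timezone.utc)
offline = now - timedelta(minutes=4)    # last_seen older than this -> at least "offline"
overdue = now - timedelta(minutes=30)   # last_seen older than this -> "overdue"
last_seen = now - timedelta(minutes=10)

if last_seen < offline and last_seen > overdue:
    print("offline")
elif last_seen < overdue:
    print("overdue")
else:
    print("online")
# prints "offline"
```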
@@ -213,8 +217,6 @@ class Agent(BaseAuditModel):
|
||||
|
||||
@property
|
||||
def checks(self) -> Dict[str, Any]:
|
||||
from checks.models import CheckResult
|
||||
|
||||
total, passing, failing, warning, info = 0, 0, 0, 0, 0
|
||||
|
||||
for check in self.get_checks_with_policies(exclude_overridden=True):
|
||||
@@ -232,12 +234,12 @@ class Agent(BaseAuditModel):
|
||||
alert_severity = (
|
||||
check.check_result.alert_severity
|
||||
if check.check_type
|
||||
in [
|
||||
in (
|
||||
CheckType.MEMORY,
|
||||
CheckType.CPU_LOAD,
|
||||
CheckType.DISK_SPACE,
|
||||
CheckType.SCRIPT,
|
||||
]
|
||||
)
|
||||
else check.alert_severity
|
||||
)
|
||||
if alert_severity == AlertSeverity.ERROR:
|
||||
@@ -257,6 +259,15 @@ class Agent(BaseAuditModel):
|
||||
}
|
||||
return ret
|
||||
|
||||
@property
|
||||
def pending_actions_count(self) -> int:
|
||||
ret = cache.get(f"{AGENT_TBL_PEND_ACTION_CNT_CACHE_PREFIX}{self.pk}")
|
||||
if ret is None:
|
||||
ret = self.pendingactions.filter(status=PAStatus.PENDING).count()
|
||||
cache.set(f"{AGENT_TBL_PEND_ACTION_CNT_CACHE_PREFIX}{self.pk}", ret, 600)
|
||||
|
||||
return ret
|
||||
|
||||
@property
|
||||
def cpu_model(self) -> List[str]:
|
||||
if self.is_posix:
|
||||
@@ -333,8 +344,8 @@ class Agent(BaseAuditModel):
|
||||
|
||||
if len(ret) == 1:
|
||||
return cast(str, ret[0])
|
||||
else:
|
||||
return ", ".join(ret) if ret else "error getting local ips"
|
||||
|
||||
return ", ".join(ret) if ret else "error getting local ips"
|
||||
|
||||
@property
|
||||
def make_model(self) -> str:
|
||||
@@ -344,7 +355,7 @@ class Agent(BaseAuditModel):
|
||||
except:
|
||||
return "error getting make/model"
|
||||
|
||||
try:
|
||||
with suppress(Exception):
|
||||
comp_sys = self.wmi_detail["comp_sys"][0]
|
||||
comp_sys_prod = self.wmi_detail["comp_sys_prod"][0]
|
||||
make = [x["Vendor"] for x in comp_sys_prod if "Vendor" in x][0]
|
||||
@@ -361,14 +372,10 @@ class Agent(BaseAuditModel):
|
||||
model = sysfam
|
||||
|
||||
return f"{make} {model}"
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
with suppress(Exception):
|
||||
comp_sys_prod = self.wmi_detail["comp_sys_prod"][0]
|
||||
return cast(str, [x["Version"] for x in comp_sys_prod if "Version" in x][0])
|
||||
except:
|
||||
pass
|
||||
|
||||
return "unknown make/model"
|
||||
|
||||
@@ -423,7 +430,6 @@ class Agent(BaseAuditModel):
|
||||
def get_checks_with_policies(
|
||||
self, exclude_overridden: bool = False
|
||||
) -> "List[Check]":
|
||||
|
||||
if exclude_overridden:
|
||||
checks = (
|
||||
list(
|
||||
@@ -438,12 +444,10 @@ class Agent(BaseAuditModel):
|
||||
return self.add_check_results(checks)
|
||||
|
||||
def get_tasks_with_policies(self) -> "List[AutomatedTask]":
|
||||
|
||||
tasks = list(self.autotasks.all()) + self.get_tasks_from_policies()
|
||||
return self.add_task_results(tasks)
|
||||
|
||||
def add_task_results(self, tasks: "List[AutomatedTask]") -> "List[AutomatedTask]":
|
||||
|
||||
results = self.taskresults.all() # type: ignore
|
||||
|
||||
for task in tasks:
|
||||
@@ -455,7 +459,6 @@ class Agent(BaseAuditModel):
|
||||
return tasks
|
||||
|
||||
def add_check_results(self, checks: "List[Check]") -> "List[Check]":
|
||||
|
||||
results = self.checkresults.all() # type: ignore
|
||||
|
||||
for check in checks:
|
||||
@@ -479,7 +482,7 @@ class Agent(BaseAuditModel):
|
||||
models.prefetch_related_objects(
|
||||
[
|
||||
policy
|
||||
for policy in [self.policy, site_policy, client_policy, default_policy]
|
||||
for policy in (self.policy, site_policy, client_policy, default_policy)
|
||||
if policy
|
||||
],
|
||||
"excluded_agents",
|
||||
@@ -517,7 +520,6 @@ class Agent(BaseAuditModel):
|
||||
# determine if any agent checks have a custom interval and set the lowest interval
|
||||
for check in self.get_checks_with_policies():
|
||||
if check.run_interval and check.run_interval < interval:
|
||||
|
||||
# don't allow check runs less than 15s
|
||||
interval = 15 if check.run_interval < 15 else check.run_interval
|
||||
|
||||
@@ -533,8 +535,8 @@ class Agent(BaseAuditModel):
|
||||
run_on_any: bool = False,
|
||||
history_pk: int = 0,
|
||||
run_as_user: bool = False,
|
||||
env_vars: list[str] = [],
|
||||
) -> Any:
|
||||
|
||||
from scripts.models import Script
|
||||
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
@@ -554,6 +556,7 @@ class Agent(BaseAuditModel):
|
||||
"shell": script.shell,
|
||||
},
|
||||
"run_as_user": run_as_user,
|
||||
"env_vars": env_vars,
|
||||
}
|
||||
|
||||
if history_pk != 0:
|
||||
@@ -589,7 +592,7 @@ class Agent(BaseAuditModel):
|
||||
def approve_updates(self) -> None:
|
||||
patch_policy = self.get_patch_policy()
|
||||
|
||||
severity_list = list()
|
||||
severity_list = []
|
||||
if patch_policy.critical == "approve":
|
||||
severity_list.append("Critical")
|
||||
|
||||
@@ -621,17 +624,14 @@ class Agent(BaseAuditModel):
|
||||
if not agent_policy:
|
||||
agent_policy = WinUpdatePolicy.objects.create(agent=self)
|
||||
|
||||
# Get the list of policies applied to the agent and select the
|
||||
# highest priority one.
|
||||
policies = self.get_agent_policies()
|
||||
|
||||
processed_policies: List[int] = list()
|
||||
for _, policy in policies.items():
|
||||
if (
|
||||
policy
|
||||
and policy.active
|
||||
and policy.pk not in processed_policies
|
||||
and policy.winupdatepolicy.exists()
|
||||
):
|
||||
if policy and policy.active and policy.winupdatepolicy.exists():
|
||||
patch_policy = policy.winupdatepolicy.first()
|
||||
break
|
||||
|
||||
# if policy still doesn't exist return the agent patch policy
|
||||
if not patch_policy:
|
||||
@@ -683,7 +683,7 @@ class Agent(BaseAuditModel):
|
||||
policies = self.get_agent_policies()
|
||||
|
||||
# loop through all policies applied to agent and return an alert_template if found
|
||||
processed_policies: List[int] = list()
|
||||
processed_policies: List[int] = []
|
||||
for key, policy in policies.items():
|
||||
# default alert_template will override a default policy with alert template applied
|
||||
if (
|
||||
@@ -793,17 +793,9 @@ class Agent(BaseAuditModel):
|
||||
async def nats_cmd(
|
||||
self, data: Dict[Any, Any], timeout: int = 30, wait: bool = True
|
||||
) -> Any:
|
||||
nats_std_port, _ = get_nats_ports()
|
||||
options = {
|
||||
"servers": f"tls://{settings.ALLOWED_HOSTS[0]}:{nats_std_port}",
|
||||
"user": "tacticalrmm",
|
||||
"password": settings.SECRET_KEY,
|
||||
"connect_timeout": 3,
|
||||
"max_reconnect_attempts": 2,
|
||||
}
|
||||
|
||||
opts = setup_nats_options()
|
||||
try:
|
||||
nc = await nats.connect(**options)
|
||||
nc = await nats.connect(**opts)
|
||||
except:
|
||||
return "natsdown"
|
||||
|
||||
@@ -835,9 +827,12 @@ class Agent(BaseAuditModel):
|
||||
Return type: tuple(message: str, error: bool)
|
||||
"""
|
||||
if mode == "tacagent":
|
||||
if self.is_posix:
|
||||
if self.plat == AgentPlat.LINUX:
|
||||
cmd = "systemctl restart tacticalagent.service"
|
||||
shell = 3
|
||||
elif self.plat == AgentPlat.DARWIN:
|
||||
cmd = "launchctl kickstart -k system/tacticalagent"
|
||||
shell = 3
|
||||
else:
|
||||
cmd = "net stop tacticalrmm & taskkill /F /IM tacticalrmm.exe & net start tacticalrmm"
|
||||
shell = 1
|
||||
@@ -870,7 +865,7 @@ class Agent(BaseAuditModel):
|
||||
return AgentAuditSerializer(agent).data
|
||||
|
||||
def delete_superseded_updates(self) -> None:
|
||||
try:
|
||||
with suppress(Exception):
|
||||
pks = [] # list of pks to delete
|
||||
kbs = list(self.winupdates.values_list("kb", flat=True))
|
||||
d = Counter(kbs)
|
||||
@@ -895,8 +890,6 @@ class Agent(BaseAuditModel):
|
||||
|
||||
pks = list(set(pks))
|
||||
self.winupdates.filter(pk__in=pks).delete()
|
||||
except:
|
||||
pass
|
||||
|
||||
def should_create_alert(
|
||||
self, alert_template: "Optional[AlertTemplate]" = None
|
||||
@@ -1015,16 +1008,16 @@ class AgentCustomField(models.Model):
|
||||
return cast(List[str], self.multiple_value)
|
||||
elif self.field.type == CustomFieldType.CHECKBOX:
|
||||
return self.bool_value
|
||||
else:
|
||||
return cast(str, self.string_value)
|
||||
|
||||
return cast(str, self.string_value)
|
||||
|
||||
def save_to_field(self, value: Union[List[Any], bool, str]) -> None:
|
||||
if self.field.type in [
|
||||
if self.field.type in (
|
||||
CustomFieldType.TEXT,
|
||||
CustomFieldType.NUMBER,
|
||||
CustomFieldType.SINGLE,
|
||||
CustomFieldType.DATETIME,
|
||||
]:
|
||||
):
|
||||
self.string_value = cast(str, value)
|
||||
self.save()
|
||||
elif self.field.type == CustomFieldType.MULTIPLE:
|
||||
|
||||
@@ -96,10 +96,8 @@ class RunScriptPerms(permissions.BasePermission):
|
||||
|
||||
class AgentNotesPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
|
||||
# permissions for GET /agents/notes/ endpoint
|
||||
if r.method == "GET":
|
||||
|
||||
# permissions for /agents/<agent_id>/notes endpoint
|
||||
if "agent_id" in view.kwargs.keys():
|
||||
return _has_perm(r, "can_list_notes") and _has_perm_on_agent(
|
||||
@@ -122,5 +120,5 @@ class AgentHistoryPerms(permissions.BasePermission):
|
||||
return _has_perm(r, "can_list_agent_history") and _has_perm_on_agent(
|
||||
r.user, view.kwargs["agent_id"]
|
||||
)
|
||||
else:
|
||||
return _has_perm(r, "can_list_agent_history")
|
||||
|
||||
return _has_perm(r, "can_list_agent_history")
|
||||
|
||||
@@ -97,24 +97,23 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
||||
physical_disks = serializers.ReadOnlyField()
|
||||
|
||||
def get_alert_template(self, obj):
|
||||
|
||||
if not obj.alert_template:
|
||||
return None
|
||||
else:
|
||||
return {
|
||||
"name": obj.alert_template.name,
|
||||
"always_email": obj.alert_template.agent_always_email,
|
||||
"always_text": obj.alert_template.agent_always_text,
|
||||
"always_alert": obj.alert_template.agent_always_alert,
|
||||
}
|
||||
|
||||
return {
|
||||
"name": obj.alert_template.name,
|
||||
"always_email": obj.alert_template.agent_always_email,
|
||||
"always_text": obj.alert_template.agent_always_text,
|
||||
"always_alert": obj.alert_template.agent_always_alert,
|
||||
}
|
||||
|
||||
def get_logged_username(self, obj) -> str:
|
||||
if obj.logged_in_username == "None" and obj.status == AGENT_STATUS_ONLINE:
|
||||
return obj.last_logged_in_user
|
||||
elif obj.logged_in_username != "None":
|
||||
return obj.logged_in_username
|
||||
else:
|
||||
return "-"
|
||||
|
||||
return "-"
|
||||
|
||||
def get_italic(self, obj) -> bool:
|
||||
return obj.logged_in_username == "None" and obj.status == AGENT_STATUS_ONLINE
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import datetime as dt
|
||||
import random
|
||||
from time import sleep
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
@@ -13,10 +12,13 @@ from scripts.models import Script
|
||||
from tacticalrmm.celery import app
|
||||
from tacticalrmm.constants import (
|
||||
AGENT_DEFER,
|
||||
AGENT_OUTAGES_LOCK,
|
||||
AGENT_STATUS_OVERDUE,
|
||||
CheckStatus,
|
||||
DebugLogType,
|
||||
)
|
||||
from tacticalrmm.helpers import rand_range
|
||||
from tacticalrmm.utils import redis_lock
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from django.db.models.query import QuerySet
|
||||
@@ -46,7 +48,7 @@ def agent_outage_email_task(pk: int, alert_interval: Optional[float] = None) ->
|
||||
return "alert not found"
|
||||
|
||||
if not alert.email_sent:
|
||||
sleep(random.randint(1, 5))
|
||||
sleep(rand_range(100, 1500))
|
||||
alert.agent.send_outage_email()
|
||||
alert.email_sent = djangotime.now()
|
||||
alert.save(update_fields=["email_sent"])
|
||||
@@ -55,7 +57,7 @@ def agent_outage_email_task(pk: int, alert_interval: Optional[float] = None) ->
|
||||
# send an email only if the last email sent is older than alert interval
|
||||
delta = djangotime.now() - dt.timedelta(days=alert_interval)
|
||||
if alert.email_sent < delta:
|
||||
sleep(random.randint(1, 5))
|
||||
sleep(rand_range(100, 1500))
|
||||
alert.agent.send_outage_email()
|
||||
alert.email_sent = djangotime.now()
|
||||
alert.save(update_fields=["email_sent"])
|
||||
@@ -67,7 +69,7 @@ def agent_outage_email_task(pk: int, alert_interval: Optional[float] = None) ->
def agent_recovery_email_task(pk: int) -> str:
from alerts.models import Alert

sleep(random.randint(1, 5))
sleep(rand_range(100, 1500))

try:
alert = Alert.objects.get(pk=pk)
|
||||
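The sleep-jitter changes in these tasks swap whole-second `random.randint` calls for `rand_range` from tacticalrmm.helpers. Assuming the helper takes a millisecond range and returns seconds (that is how the call sites read, but it is an assumption), an equivalent sketch:

```python
import random
from time import sleep

def rand_range(min_ms: int, max_ms: int) -> float:
    # Assumed behaviour: pick a random millisecond value and return it as seconds.
    return random.randint(min_ms, max_ms) / 1000

# stagger outgoing notifications by 0.1-1.5 seconds instead of 1-5 whole seconds
sleep(rand_range(100, 1500))
```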
@@ -91,7 +93,7 @@ def agent_outage_sms_task(pk: int, alert_interval: Optional[float] = None) -> st
|
||||
return "alert not found"
|
||||
|
||||
if not alert.sms_sent:
|
||||
sleep(random.randint(1, 3))
|
||||
sleep(rand_range(100, 1500))
|
||||
alert.agent.send_outage_sms()
|
||||
alert.sms_sent = djangotime.now()
|
||||
alert.save(update_fields=["sms_sent"])
|
||||
@@ -100,7 +102,7 @@ def agent_outage_sms_task(pk: int, alert_interval: Optional[float] = None) -> st
|
||||
# send an sms only if the last sms sent is older than alert interval
|
||||
delta = djangotime.now() - dt.timedelta(days=alert_interval)
|
||||
if alert.sms_sent < delta:
|
||||
sleep(random.randint(1, 3))
|
||||
sleep(rand_range(100, 1500))
|
||||
alert.agent.send_outage_sms()
|
||||
alert.sms_sent = djangotime.now()
|
||||
alert.save(update_fields=["sms_sent"])
|
||||
@@ -112,7 +114,7 @@ def agent_outage_sms_task(pk: int, alert_interval: Optional[float] = None) -> st
|
||||
def agent_recovery_sms_task(pk: int) -> str:
|
||||
from alerts.models import Alert
|
||||
|
||||
sleep(random.randint(1, 3))
|
||||
sleep(rand_range(100, 1500))
|
||||
try:
|
||||
alert = Alert.objects.get(pk=pk)
|
||||
except Alert.DoesNotExist:
|
||||
@@ -125,24 +127,20 @@ def agent_recovery_sms_task(pk: int) -> str:
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
def agent_outages_task() -> None:
from alerts.models import Alert
@app.task(bind=True)
def agent_outages_task(self) -> str:
with redis_lock(AGENT_OUTAGES_LOCK, self.app.oid) as acquired:
if not acquired:
return f"{self.app.oid} still running"

agents = Agent.objects.only(
"pk",
"agent_id",
"last_seen",
"offline_time",
"overdue_time",
"overdue_email_alert",
"overdue_text_alert",
"overdue_dashboard_alert",
)
from alerts.models import Alert
from core.tasks import _get_agent_qs

for agent in agents:
if agent.status == AGENT_STATUS_OVERDUE:
Alert.handle_alert_failure(agent)
for agent in _get_agent_qs():
if agent.status == AGENT_STATUS_OVERDUE:
Alert.handle_alert_failure(agent)

return "completed"
|
||||
|
||||
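`agent_outages_task` is now bound (`bind=True`) and wrapped in `redis_lock`, so only one worker runs it at a time. A minimal sketch of the underlying cache-add lock pattern (the real helper lives in tacticalrmm.utils and may differ; the lock name and timeout below are illustrative):

```python
from contextlib import contextmanager

from django.core.cache import cache

@contextmanager
def redis_lock(lock_id: str, oid: str, timeout: int = 300):
    # cache.add() is atomic: it only succeeds when the key does not already exist.
    acquired = cache.add(lock_id, oid, timeout)
    try:
        yield acquired
    finally:
        if acquired:
            cache.delete(lock_id)

# usage inside a bound celery task:
# @app.task(bind=True)
# def singleton_task(self) -> str:
#     with redis_lock("outages-lock", self.app.oid) as acquired:
#         if not acquired:
#             return f"{self.app.oid} still running"
#         ...  # the actual work runs exactly once across workers
#         return "completed"
```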
|
||||
@app.task
|
||||
@@ -154,6 +152,7 @@ def run_script_email_results_task(
|
||||
args: list[str] = [],
|
||||
history_pk: int = 0,
|
||||
run_as_user: bool = False,
|
||||
env_vars: list[str] = [],
|
||||
):
|
||||
agent = Agent.objects.get(pk=agentpk)
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
@@ -165,6 +164,7 @@ def run_script_email_results_task(
|
||||
wait=True,
|
||||
history_pk=history_pk,
|
||||
run_as_user=run_as_user,
|
||||
env_vars=env_vars,
|
||||
)
|
||||
if r == "timeout":
|
||||
DebugLog.error(
|
||||
|
||||
@@ -264,7 +264,7 @@ class TestAgentUpdate(TacticalTestCase):
|
||||
agents = baker.make_recipe("agents.agent", _quantity=5)
|
||||
other_agents = baker.make_recipe("agents.agent", _quantity=7)
|
||||
|
||||
url = f"/agents/update/"
|
||||
url = "/agents/update/"
|
||||
|
||||
data = {
|
||||
"agent_ids": [agent.agent_id for agent in agents]
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
from unittest.mock import patch, AsyncMock
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.conf import settings
|
||||
from rest_framework.response import Response
|
||||
|
||||
from agents.utils import generate_linux_install, get_agent_url
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
@@ -540,6 +540,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
"args": [],
|
||||
"timeout": 15,
|
||||
"run_as_user": False,
|
||||
"env_vars": ["hello=world", "foo=bar"],
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
@@ -555,6 +556,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
run_as_user=False,
|
||||
env_vars=["hello=world", "foo=bar"],
|
||||
)
|
||||
run_script.reset_mock()
|
||||
|
||||
@@ -567,6 +569,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
"emailMode": "default",
|
||||
"emails": ["admin@example.com", "bob@example.com"],
|
||||
"run_as_user": False,
|
||||
"env_vars": ["hello=world", "foo=bar"],
|
||||
}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
@@ -577,6 +580,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
emails=[],
|
||||
args=["abc", "123"],
|
||||
run_as_user=False,
|
||||
env_vars=["hello=world", "foo=bar"],
|
||||
)
|
||||
email_task.reset_mock()
|
||||
|
||||
@@ -591,6 +595,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
emails=["admin@example.com", "bob@example.com"],
|
||||
args=["abc", "123"],
|
||||
run_as_user=False,
|
||||
env_vars=["hello=world", "foo=bar"],
|
||||
)
|
||||
|
||||
# test fire and forget
|
||||
@@ -600,6 +605,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
"args": ["hello", "world"],
|
||||
"timeout": 22,
|
||||
"run_as_user": True,
|
||||
"env_vars": ["hello=world", "foo=bar"],
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
@@ -614,6 +620,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
timeout=25,
|
||||
history_pk=hist.pk,
|
||||
run_as_user=True,
|
||||
env_vars=["hello=world", "foo=bar"],
|
||||
)
|
||||
run_script.reset_mock()
|
||||
|
||||
@@ -629,6 +636,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
"custom_field": custom_field.pk,
|
||||
"save_all_output": True,
|
||||
"run_as_user": False,
|
||||
"env_vars": ["hello=world", "foo=bar"],
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
@@ -644,6 +652,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
run_as_user=False,
|
||||
env_vars=["hello=world", "foo=bar"],
|
||||
)
|
||||
run_script.reset_mock()
|
||||
|
||||
@@ -662,6 +671,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
"custom_field": custom_field.pk,
|
||||
"save_all_output": False,
|
||||
"run_as_user": False,
|
||||
"env_vars": ["hello=world", "foo=bar"],
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
@@ -677,6 +687,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
run_as_user=False,
|
||||
env_vars=["hello=world", "foo=bar"],
|
||||
)
|
||||
run_script.reset_mock()
|
||||
|
||||
@@ -697,6 +708,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
"custom_field": custom_field.pk,
|
||||
"save_all_output": False,
|
||||
"run_as_user": False,
|
||||
"env_vars": ["hello=world", "foo=bar"],
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
@@ -712,6 +724,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
run_as_user=False,
|
||||
env_vars=["hello=world", "foo=bar"],
|
||||
)
|
||||
run_script.reset_mock()
|
||||
|
||||
@@ -729,6 +742,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
"args": ["hello", "world"],
|
||||
"timeout": 22,
|
||||
"run_as_user": False,
|
||||
"env_vars": ["hello=world", "foo=bar"],
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
@@ -744,6 +758,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
run_as_user=False,
|
||||
env_vars=["hello=world", "foo=bar"],
|
||||
)
|
||||
run_script.reset_mock()
|
||||
|
||||
@@ -836,7 +851,6 @@ class TestAgentViews(TacticalTestCase):
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_get_agent_history(self):
|
||||
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
history = baker.make("agents.AgentHistory", agent=agent, _quantity=30)
|
||||
@@ -992,7 +1006,6 @@ class TestAgentPermissions(TacticalTestCase):
|
||||
@patch("time.sleep")
|
||||
@patch("agents.models.Agent.nats_cmd", return_value="ok")
|
||||
def test_agent_actions_permissions(self, nats_cmd, sleep):
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
unauthorized_agent = baker.make_recipe("agents.agent")
|
||||
|
||||
@@ -1120,7 +1133,6 @@ class TestAgentPermissions(TacticalTestCase):
|
||||
self.assertEqual(len(response.data["agents"]), 7)
|
||||
|
||||
def test_generating_agent_installer_permissions(self):
|
||||
|
||||
client = baker.make("clients.Client")
|
||||
client_site = baker.make("clients.Site", client=client)
|
||||
site = baker.make("clients.Site")
|
||||
@@ -1183,7 +1195,6 @@ class TestAgentPermissions(TacticalTestCase):
|
||||
self.check_not_authorized("post", url, data)
|
||||
|
||||
def test_agent_notes_permissions(self):
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
notes = baker.make("agents.Note", agent=agent, _quantity=5)
|
||||
|
||||
@@ -1272,9 +1283,9 @@ class TestAgentPermissions(TacticalTestCase):
|
||||
|
||||
sites = baker.make("clients.Site", _quantity=2)
|
||||
agent = baker.make_recipe("agents.agent", site=sites[0])
|
||||
history = baker.make("agents.AgentHistory", agent=agent, _quantity=5)
|
||||
history = baker.make("agents.AgentHistory", agent=agent, _quantity=5) # noqa
|
||||
unauthorized_agent = baker.make_recipe("agents.agent", site=sites[1])
|
||||
unauthorized_history = baker.make(
|
||||
unauthorized_history = baker.make( # noqa
|
||||
"agents.AgentHistory", agent=unauthorized_agent, _quantity=6
|
||||
)
|
||||
|
||||
|
||||
@@ -42,4 +42,5 @@ urlpatterns = [
|
||||
path("update/", views.update_agents),
|
||||
path("installer/", views.install_agent),
|
||||
path("bulkrecovery/", views.bulk_agent_recovery),
|
||||
path("scripthistory/", views.ScriptRunHistory.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import asyncio
|
||||
import tempfile
|
||||
import urllib.parse
|
||||
from io import StringIO
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
from django.http import FileResponse
|
||||
@@ -33,7 +34,6 @@ def generate_linux_install(
|
||||
api: str,
|
||||
download_url: str,
|
||||
) -> FileResponse:
|
||||
|
||||
match arch:
|
||||
case "amd64":
|
||||
arch_id = MeshAgentIdent.LINUX64
|
||||
@@ -54,9 +54,7 @@ def generate_linux_install(
f"{core.mesh_site}/meshagents?id={mesh_id}&installflags=2&meshinstall={arch_id}"
)

sh = settings.LINUX_AGENT_SCRIPT
with open(sh, "r") as f:
text = f.read()
text = Path(settings.LINUX_AGENT_SCRIPT).read_text()

replace = {
"agentDLChange": download_url,
@@ -71,11 +69,8 @@ def generate_linux_install(
for i, j in replace.items():
text = text.replace(i, j)

with tempfile.NamedTemporaryFile() as fp:
with open(fp.name, "w") as f:
f.write(text)
f.write("\n")

text += "\n"
with StringIO(text) as fp:
return FileResponse(
open(fp.name, "rb"), as_attachment=True, filename="linux_agent_install.sh"
fp.read(), as_attachment=True, filename="linux_agent_install.sh"
)
|
||||
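`generate_linux_install` no longer writes a temporary file; the rendered script is kept in memory and returned directly. A simplified sketch of the same approach using a plain `HttpResponse` (the actual view builds a `FileResponse` from a `StringIO`; the function and argument names below are placeholders):

```python
from django.http import HttpResponse

def render_install_script(template_text: str, replacements: dict[str, str]) -> HttpResponse:
    # Substitute placeholders in the shell-script template, then serve the result
    # as a download without ever touching the filesystem.
    for needle, value in replacements.items():
        template_text = template_text.replace(needle, value)
    template_text += "\n"

    resp = HttpResponse(template_text, content_type="text/x-shellscript")
    resp["Content-Disposition"] = "attachment; filename=linux_agent_install.sh"
    return resp
```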
|
||||
@@ -1,15 +1,24 @@
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
import os
|
||||
import random
|
||||
import string
|
||||
import time
|
||||
from io import StringIO
|
||||
from pathlib import Path
|
||||
|
||||
from core.utils import (
|
||||
get_core_settings,
|
||||
get_mesh_ws_url,
|
||||
remove_mesh_agent,
|
||||
token_is_valid,
|
||||
)
|
||||
from django.conf import settings
|
||||
from django.db.models import Count, Exists, OuterRef, Prefetch, Q
|
||||
from django.db.models import Exists, OuterRef, Prefetch, Q
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from django.utils.dateparse import parse_datetime
|
||||
from logs.models import AuditLog, DebugLog, PendingAction
|
||||
from meshctrl.utils import get_login_token
|
||||
from packaging import version as pyver
|
||||
from rest_framework import serializers
|
||||
@@ -18,21 +27,13 @@ from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from core.utils import (
|
||||
get_core_settings,
|
||||
get_mesh_ws_url,
|
||||
remove_mesh_agent,
|
||||
token_is_valid,
|
||||
)
|
||||
from logs.models import AuditLog, DebugLog, PendingAction
|
||||
from scripts.models import Script
|
||||
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
|
||||
from tacticalrmm.constants import (
|
||||
AGENT_DEFER,
|
||||
AGENT_TABLE_DEFER,
|
||||
AGENT_STATUS_OFFLINE,
|
||||
AGENT_STATUS_ONLINE,
|
||||
AGENT_TABLE_DEFER,
|
||||
AgentHistoryType,
|
||||
AgentMonType,
|
||||
AgentPlat,
|
||||
@@ -40,7 +41,6 @@ from tacticalrmm.constants import (
|
||||
DebugLogType,
|
||||
EvtLogNames,
|
||||
PAAction,
|
||||
PAStatus,
|
||||
)
|
||||
from tacticalrmm.helpers import date_is_in_past, notify_error
|
||||
from tacticalrmm.permissions import (
|
||||
@@ -135,18 +135,12 @@ class GetAgents(APIView):
|
||||
queryset=CheckResult.objects.select_related("assigned_check"),
|
||||
),
|
||||
)
|
||||
.annotate(
|
||||
pending_actions_count=Count(
|
||||
"pendingactions",
|
||||
filter=Q(pendingactions__status=PAStatus.PENDING),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
has_patches_pending=Exists(
|
||||
WinUpdate.objects.filter(
|
||||
agent_id=OuterRef("pk"), action="approve", installed=False
|
||||
)
|
||||
)
|
||||
),
|
||||
)
|
||||
)
|
||||
serializer = AgentTableSerializer(agents, many=True)
|
||||
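The `GetAgents` queryset drops the `Count()` annotation (the table now uses the cached `pending_actions_count` property) and keeps the `Exists()` subquery, which only has to answer a boolean. A hedged sketch of that annotation shape, using the project's model names:

```python
from django.db.models import Exists, OuterRef

from agents.models import Agent
from winupdate.models import WinUpdate

# `has_patches_pending` becomes a boolean computed in SQL via EXISTS, which is
# cheaper than COUNT when all that matters is "are there any matching rows?".
agents = Agent.objects.annotate(
    has_patches_pending=Exists(
        WinUpdate.objects.filter(
            agent_id=OuterRef("pk"), action="approve", installed=False
        )
    )
)
```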
@@ -174,6 +168,7 @@ class GetUpdateDeleteAgent(APIView):
|
||||
fields = [
|
||||
"maintenance_mode", # TODO separate this
|
||||
"policy", # TODO separate this
|
||||
"block_policy_inheritance", # TODO separate this
|
||||
"monitoring_type",
|
||||
"description",
|
||||
"overdue_email_alert",
|
||||
@@ -208,9 +203,7 @@ class GetUpdateDeleteAgent(APIView):
|
||||
p_serializer.save()
|
||||
|
||||
if "custom_fields" in request.data.keys():
|
||||
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
custom_field = field
|
||||
custom_field["agent"] = agent.pk
|
||||
|
||||
@@ -236,10 +229,11 @@ class GetUpdateDeleteAgent(APIView):
|
||||
def delete(self, request, agent_id):
|
||||
agent = get_object_or_404(Agent, agent_id=agent_id)
|
||||
|
||||
code = "foo"
|
||||
code = "foo" # stub for windows
|
||||
if agent.plat == AgentPlat.LINUX:
|
||||
with open(settings.LINUX_AGENT_SCRIPT, "r") as f:
|
||||
code = f.read()
|
||||
code = Path(settings.LINUX_AGENT_SCRIPT).read_text()
|
||||
elif agent.plat == AgentPlat.DARWIN:
|
||||
code = Path(settings.MAC_UNINSTALL).read_text()
|
||||
|
||||
asyncio.run(agent.nats_cmd({"func": "uninstall", "code": code}, wait=False))
|
||||
name = agent.hostname
|
||||
@@ -251,7 +245,7 @@ class GetUpdateDeleteAgent(APIView):
|
||||
asyncio.run(remove_mesh_agent(uri, mesh_id))
|
||||
except Exception as e:
|
||||
DebugLog.error(
|
||||
message=f"Unable to remove agent {name} from meshcentral database: {str(e)}",
|
||||
message=f"Unable to remove agent {name} from meshcentral database: {e}",
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
)
|
||||
return Response(f"{name} will now be uninstalled.")
|
||||
@@ -269,7 +263,7 @@ class AgentProcesses(APIView):
|
||||
|
||||
agent = get_object_or_404(Agent, agent_id=agent_id)
|
||||
r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
|
||||
if r == "timeout" or r == "natsdown":
|
||||
if r in ("timeout", "natsdown"):
|
||||
return notify_error("Unable to contact the agent")
|
||||
return Response(r)
|
||||
|
||||
@@ -280,7 +274,7 @@ class AgentProcesses(APIView):
|
||||
agent.nats_cmd({"func": "killproc", "procpid": int(pid)}, timeout=15)
|
||||
)
|
||||
|
||||
if r == "timeout" or r == "natsdown":
|
||||
if r in ("timeout", "natsdown"):
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r != "ok":
|
||||
return notify_error(r)
|
||||
@@ -412,7 +406,7 @@ def get_event_log(request, agent_id, logtype, days):
|
||||
},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
|
||||
if r == "timeout" or r == "natsdown":
|
||||
if r in ("timeout", "natsdown"):
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response(r)
|
||||
@@ -464,6 +458,7 @@ def send_raw_cmd(request, agent_id):
|
||||
|
||||
class Reboot(APIView):
|
||||
permission_classes = [IsAuthenticated, RebootAgentPerms]
|
||||
|
||||
# reboot now
|
||||
def post(self, request, agent_id):
|
||||
agent = get_object_or_404(Agent, agent_id=agent_id)
|
||||
@@ -533,11 +528,17 @@ class Reboot(APIView):
|
||||
@api_view(["POST"])
|
||||
@permission_classes([IsAuthenticated, InstallAgentPerms])
|
||||
def install_agent(request):
|
||||
from knox.models import AuthToken
|
||||
|
||||
from accounts.models import User
|
||||
from agents.utils import get_agent_url
|
||||
from core.utils import token_is_valid
|
||||
from knox.models import AuthToken
|
||||
|
||||
# TODO rework this ghetto validation hack
|
||||
# https://github.com/amidaware/tacticalrmm/issues/1461
|
||||
try:
|
||||
int(request.data["expires"])
|
||||
except ValueError:
|
||||
return notify_error("Please enter a valid number of hours")
|
||||
|
||||
client_id = request.data["client"]
|
||||
site_id = request.data["site"]
|
||||
@@ -550,15 +551,38 @@ def install_agent(request):
|
||||
|
||||
codesign_token, is_valid = token_is_valid()
|
||||
|
||||
inno = f"tacticalagent-v{version}-{plat}-{goarch}.exe"
|
||||
if request.data["installMethod"] in {"bash", "mac"} and not is_valid:
|
||||
return notify_error(
|
||||
"Linux/Mac agents require code signing. Please see https://docs.tacticalrmm.com/code_signing/ for more info."
|
||||
)
|
||||
|
||||
inno = f"tacticalagent-v{version}-{plat}-{goarch}"
|
||||
if plat == AgentPlat.WINDOWS:
|
||||
inno += ".exe"
|
||||
|
||||
download_url = get_agent_url(goarch=goarch, plat=plat, token=codesign_token)
|
||||
|
||||
installer_user = User.objects.filter(is_installer_user=True).first()
|
||||
|
||||
_, token = AuthToken.objects.create(
|
||||
user=installer_user, expiry=dt.timedelta(hours=request.data["expires"])
|
||||
user=installer_user, expiry=dt.timedelta(hours=int(request.data["expires"]))
|
||||
)
|
||||
|
||||
install_flags = [
|
||||
"-m",
|
||||
"install",
|
||||
"--api",
|
||||
request.data["api"],
|
||||
"--client-id",
|
||||
client_id,
|
||||
"--site-id",
|
||||
site_id,
|
||||
"--agent-type",
|
||||
request.data["agenttype"],
|
||||
"--auth",
|
||||
token,
|
||||
]
|
||||
|
||||
if request.data["installMethod"] == "exe":
|
||||
from tacticalrmm.utils import generate_winagent_exe
|
||||
|
||||
@@ -576,15 +600,6 @@ def install_agent(request):
|
||||
)
|
||||
|
||||
elif request.data["installMethod"] == "bash":
|
||||
# TODO
|
||||
# linux agents are in beta for now, only available for sponsors for testing
|
||||
# remove this after it's out of beta
|
||||
|
||||
if not is_valid:
|
||||
return notify_error(
|
||||
"Missing code signing token, or token is no longer valid. Please read the docs for more info."
|
||||
)
|
||||
|
||||
from agents.utils import generate_linux_install
|
||||
|
||||
return generate_linux_install(
|
||||
@@ -597,52 +612,44 @@ def install_agent(request):
|
||||
download_url=download_url,
|
||||
)
|
||||
|
||||
elif request.data["installMethod"] == "manual":
|
||||
cmd = [
|
||||
inno,
|
||||
"/VERYSILENT",
|
||||
"/SUPPRESSMSGBOXES",
|
||||
"&&",
|
||||
"ping",
|
||||
"127.0.0.1",
|
||||
"-n",
|
||||
"5",
|
||||
"&&",
|
||||
r'"C:\Program Files\TacticalAgent\tacticalrmm.exe"',
|
||||
"-m",
|
||||
"install",
|
||||
"--api",
|
||||
request.data["api"],
|
||||
"--client-id",
|
||||
client_id,
|
||||
"--site-id",
|
||||
site_id,
|
||||
"--agent-type",
|
||||
request.data["agenttype"],
|
||||
"--auth",
|
||||
token,
|
||||
]
|
||||
elif request.data["installMethod"] in {"manual", "mac"}:
|
||||
resp = {}
|
||||
if request.data["installMethod"] == "manual":
|
||||
cmd = [
|
||||
inno,
|
||||
"/VERYSILENT",
|
||||
"/SUPPRESSMSGBOXES",
|
||||
"&&",
|
||||
"ping",
|
||||
"127.0.0.1",
|
||||
"-n",
|
||||
"5",
|
||||
"&&",
|
||||
r'"C:\Program Files\TacticalAgent\tacticalrmm.exe"',
|
||||
] + install_flags
|
||||
|
||||
if int(request.data["rdp"]):
|
||||
cmd.append("--rdp")
|
||||
if int(request.data["ping"]):
|
||||
cmd.append("--ping")
|
||||
if int(request.data["power"]):
|
||||
cmd.append("--power")
|
||||
if int(request.data["rdp"]):
|
||||
cmd.append("--rdp")
|
||||
if int(request.data["ping"]):
|
||||
cmd.append("--ping")
|
||||
if int(request.data["power"]):
|
||||
cmd.append("--power")
|
||||
|
||||
resp = {
|
||||
"cmd": " ".join(str(i) for i in cmd),
|
||||
"url": download_url,
|
||||
}
|
||||
resp["cmd"] = " ".join(str(i) for i in cmd)
|
||||
else:
|
||||
install_flags.insert(0, f"sudo ./{inno}")
|
||||
cmd = install_flags.copy()
|
||||
dl = f"curl -L -o {inno} '{download_url}'"
|
||||
resp["cmd"] = (
|
||||
dl + f" && chmod +x {inno} && " + " ".join(str(i) for i in cmd)
|
||||
)
|
||||
|
||||
resp["url"] = download_url
|
||||
|
||||
return Response(resp)
|
||||
|
||||
elif request.data["installMethod"] == "powershell":
|
||||
|
||||
ps = os.path.join(settings.BASE_DIR, "core/installer.ps1")
|
||||
|
||||
with open(ps, "r") as f:
|
||||
text = f.read()
|
||||
text = Path(settings.BASE_DIR / "core" / "installer.ps1").read_text()
|
||||
|
||||
replace_dict = {
|
||||
"innosetupchange": inno,
|
||||
@@ -660,27 +667,9 @@ def install_agent(request):
|
||||
for i, j in replace_dict.items():
|
||||
text = text.replace(i, j)
|
||||
|
||||
file_name = "rmm-installer.ps1"
|
||||
ps1 = os.path.join(settings.EXE_DIR, file_name)
|
||||
|
||||
if os.path.exists(ps1):
|
||||
try:
|
||||
os.remove(ps1)
|
||||
except Exception as e:
|
||||
DebugLog.error(message=str(e))
|
||||
|
||||
with open(ps1, "w") as f:
|
||||
f.write(text)
|
||||
|
||||
if settings.DEBUG:
|
||||
with open(ps1, "r") as f:
|
||||
response = HttpResponse(f.read(), content_type="text/plain")
|
||||
response["Content-Disposition"] = f"inline; filename={file_name}"
|
||||
return response
|
||||
else:
|
||||
response = HttpResponse()
|
||||
response["Content-Disposition"] = f"attachment; filename={file_name}"
|
||||
response["X-Accel-Redirect"] = f"/private/exe/{file_name}"
|
||||
with StringIO(text) as fp:
|
||||
response = HttpResponse(fp.read(), content_type="text/plain")
|
||||
response["Content-Disposition"] = "attachment; filename=rmm-installer.ps1"
|
||||
return response
|
||||
|
||||
|
||||
@@ -713,6 +702,7 @@ def run_script(request, agent_id):
|
||||
output = request.data["output"]
|
||||
args = request.data["args"]
|
||||
run_as_user: bool = request.data["run_as_user"]
|
||||
env_vars: list[str] = request.data["env_vars"]
|
||||
req_timeout = int(request.data["timeout"]) + 3
|
||||
|
||||
AuditLog.audit_script_run(
|
||||
@@ -738,6 +728,7 @@ def run_script(request, agent_id):
|
||||
wait=True,
|
||||
history_pk=history_pk,
|
||||
run_as_user=run_as_user,
|
||||
env_vars=env_vars,
|
||||
)
|
||||
return Response(r)
|
||||
|
||||
@@ -752,6 +743,7 @@ def run_script(request, agent_id):
|
||||
emails=emails,
|
||||
args=args,
|
||||
run_as_user=run_as_user,
|
||||
env_vars=env_vars,
|
||||
)
|
||||
elif output == "collector":
|
||||
from core.models import CustomField
|
||||
@@ -763,6 +755,7 @@ def run_script(request, agent_id):
|
||||
wait=True,
|
||||
history_pk=history_pk,
|
||||
run_as_user=run_as_user,
|
||||
env_vars=env_vars,
|
||||
)
|
||||
|
||||
custom_field = CustomField.objects.get(pk=request.data["custom_field"])
|
||||
@@ -792,6 +785,7 @@ def run_script(request, agent_id):
|
||||
wait=True,
|
||||
history_pk=history_pk,
|
||||
run_as_user=run_as_user,
|
||||
env_vars=env_vars,
|
||||
)
|
||||
|
||||
Note.objects.create(agent=agent, user=request.user, note=r)
|
||||
@@ -803,6 +797,7 @@ def run_script(request, agent_id):
|
||||
timeout=req_timeout,
|
||||
history_pk=history_pk,
|
||||
run_as_user=run_as_user,
|
||||
env_vars=env_vars,
|
||||
)
|
||||
|
||||
return Response(f"{script.name} will now be run on {agent.hostname}")
|
||||
@@ -912,6 +907,8 @@ def bulk(request):
|
||||
q = q.filter(plat=AgentPlat.WINDOWS)
|
||||
elif request.data["osType"] == AgentPlat.LINUX:
|
||||
q = q.filter(plat=AgentPlat.LINUX)
|
||||
elif request.data["osType"] == AgentPlat.DARWIN:
|
||||
q = q.filter(plat=AgentPlat.DARWIN)
|
||||
|
||||
agents: list[int] = [agent.pk for agent in q]
|
||||
|
||||
@@ -950,11 +947,11 @@ def bulk(request):
|
||||
request.data["timeout"],
|
||||
request.user.username[:50],
|
||||
request.data["run_as_user"],
|
||||
request.data["env_vars"],
|
||||
)
|
||||
return Response(f"{script.name} will now be run on {len(agents)} agents")
|
||||
|
||||
elif request.data["mode"] == "patch":
|
||||
|
||||
if request.data["patchMode"] == "install":
|
||||
bulk_install_updates_task.delay(agents)
|
||||
return Response(
|
||||
@@ -970,7 +967,6 @@ def bulk(request):
|
||||
@api_view(["POST"])
|
||||
@permission_classes([IsAuthenticated, AgentPerms])
|
||||
def agent_maintenance(request):
|
||||
|
||||
if request.data["type"] == "Client":
|
||||
if not _has_perm_on_client(request.user, request.data["id"]):
|
||||
raise PermissionDenied()
|
||||
@@ -997,10 +993,10 @@ def agent_maintenance(request):
|
||||
if count:
|
||||
action = "disabled" if not request.data["action"] else "enabled"
|
||||
return Response(f"Maintenance mode has been {action} on {count} agents")
|
||||
else:
|
||||
return Response(
|
||||
f"No agents have been put in maintenance mode. You might not have permissions to the resources."
|
||||
)
|
||||
|
||||
return Response(
|
||||
"No agents have been put in maintenance mode. You might not have permissions to the resources."
|
||||
)
|
||||
|
||||
|
||||
@api_view(["GET"])
|
||||
@@ -1032,3 +1028,98 @@ class AgentHistoryView(APIView):
|
||||
history = AgentHistory.objects.filter_by_role(request.user) # type: ignore
|
||||
ctx = {"default_tz": get_default_timezone()}
|
||||
return Response(AgentHistorySerializer(history, many=True, context=ctx).data)
|
||||
|
||||
|
||||
class ScriptRunHistory(APIView):
|
||||
permission_classes = [IsAuthenticated, AgentHistoryPerms]
|
||||
|
||||
class OutputSerializer(serializers.ModelSerializer):
|
||||
script_name = serializers.ReadOnlyField(source="script.name")
|
||||
agent_id = serializers.ReadOnlyField(source="agent.agent_id")
|
||||
|
||||
class Meta:
|
||||
model = AgentHistory
|
||||
fields = (
|
||||
"id",
|
||||
"time",
|
||||
"username",
|
||||
"script",
|
||||
"script_results",
|
||||
"agent",
|
||||
"script_name",
|
||||
"agent_id",
|
||||
)
|
||||
read_only_fields = fields
|
||||
|
||||
def get(self, request):
|
||||
date_range_filter = Q()
|
||||
script_name_filter = Q()
|
||||
|
||||
start = request.query_params.get("start", None)
|
||||
end = request.query_params.get("end", None)
|
||||
limit = request.query_params.get("limit", None)
|
||||
script_name = request.query_params.get("scriptname", None)
|
||||
if start and end:
|
||||
start_dt = parse_datetime(start)
|
||||
end_dt = parse_datetime(end) + djangotime.timedelta(days=1)
|
||||
date_range_filter = Q(time__range=[start_dt, end_dt])
|
||||
|
||||
if script_name:
|
||||
script_name_filter = Q(script__name=script_name)
|
||||
|
||||
AGENT_R_DEFER = (
|
||||
"agent__wmi_detail",
|
||||
"agent__services",
|
||||
"agent__created_by",
|
||||
"agent__created_time",
|
||||
"agent__modified_by",
|
||||
"agent__modified_time",
|
||||
"agent__disks",
|
||||
"agent__operating_system",
|
||||
"agent__mesh_node_id",
|
||||
"agent__description",
|
||||
"agent__patches_last_installed",
|
||||
"agent__time_zone",
|
||||
"agent__alert_template_id",
|
||||
"agent__policy_id",
|
||||
"agent__site_id",
|
||||
"agent__version",
|
||||
"agent__plat",
|
||||
"agent__goarch",
|
||||
"agent__hostname",
|
||||
"agent__last_seen",
|
||||
"agent__public_ip",
|
||||
"agent__total_ram",
|
||||
"agent__boot_time",
|
||||
"agent__logged_in_username",
|
||||
"agent__last_logged_in_user",
|
||||
"agent__monitoring_type",
|
||||
"agent__overdue_email_alert",
|
||||
"agent__overdue_text_alert",
|
||||
"agent__overdue_dashboard_alert",
|
||||
"agent__offline_time",
|
||||
"agent__overdue_time",
|
||||
"agent__check_interval",
|
||||
"agent__needs_reboot",
|
||||
"agent__choco_installed",
|
||||
"agent__maintenance_mode",
|
||||
"agent__block_policy_inheritance",
|
||||
)
|
||||
hists = (
|
||||
AgentHistory.objects.filter(type=AgentHistoryType.SCRIPT_RUN)
|
||||
.select_related("agent")
|
||||
.select_related("script")
|
||||
.defer(*AGENT_R_DEFER)
|
||||
.filter(date_range_filter)
|
||||
.filter(script_name_filter)
|
||||
.order_by("-time")
|
||||
)
|
||||
if limit:
|
||||
try:
|
||||
lim = int(limit)
|
||||
except ValueError:
|
||||
return notify_error("Invalid limit")
|
||||
hists = hists[:lim]
|
||||
|
||||
ret = self.OutputSerializer(hists, many=True).data
|
||||
return Response(ret)
|
||||
|
||||
@@ -0,0 +1,36 @@
|
||||
# Generated by Django 4.1.3 on 2022-11-26 20:22
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("alerts", "0012_alter_alert_action_retcode_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="alerttemplate",
|
||||
name="action_env_vars",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.TextField(blank=True, null=True),
|
||||
blank=True,
|
||||
default=list,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="alerttemplate",
|
||||
name="resolved_action_env_vars",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.TextField(blank=True, null=True),
|
||||
blank=True,
|
||||
default=list,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -10,6 +10,7 @@ from django.utils import timezone as djangotime
|
||||
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
from tacticalrmm.constants import (
|
||||
AgentHistoryType,
|
||||
AgentMonType,
|
||||
AlertSeverity,
|
||||
AlertType,
|
||||
@@ -153,7 +154,6 @@ class Alert(models.Model):
|
||||
alert_severity: Optional[str] = None,
|
||||
skip_create: bool = False,
|
||||
) -> "Optional[Alert]":
|
||||
|
||||
# need to pass agent if the check is a policy
|
||||
if not cls.objects.filter(
|
||||
assigned_check=check,
|
||||
@@ -171,12 +171,12 @@ class Alert(models.Model):
|
||||
alert_type=AlertType.CHECK,
|
||||
severity=check.alert_severity
|
||||
if check.check_type
|
||||
not in [
|
||||
not in {
|
||||
CheckType.MEMORY,
|
||||
CheckType.CPU_LOAD,
|
||||
CheckType.DISK_SPACE,
|
||||
CheckType.SCRIPT,
|
||||
]
|
||||
}
|
||||
else alert_severity,
|
||||
message=f"{agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
|
||||
hidden=True,
|
||||
@@ -216,7 +216,6 @@ class Alert(models.Model):
|
||||
agent: "Agent",
|
||||
skip_create: bool = False,
|
||||
) -> "Optional[Alert]":
|
||||
|
||||
if not cls.objects.filter(
|
||||
assigned_task=task,
|
||||
agent=agent,
|
||||
@@ -268,7 +267,7 @@ class Alert(models.Model):
|
||||
def handle_alert_failure(
|
||||
cls, instance: Union[Agent, TaskResult, CheckResult]
|
||||
) -> None:
|
||||
from agents.models import Agent
|
||||
from agents.models import Agent, AgentHistory
|
||||
from autotasks.models import TaskResult
|
||||
from checks.models import CheckResult
|
||||
|
||||
@@ -327,12 +326,12 @@ class Alert(models.Model):
|
||||
alert_severity = (
|
||||
instance.assigned_check.alert_severity
|
||||
if instance.assigned_check.check_type
|
||||
not in [
|
||||
not in {
|
||||
CheckType.MEMORY,
|
||||
CheckType.CPU_LOAD,
|
||||
CheckType.DISK_SPACE,
|
||||
CheckType.SCRIPT,
|
||||
]
|
||||
}
|
||||
else instance.alert_severity
|
||||
)
|
||||
agent = instance.agent
|
||||
@@ -341,23 +340,20 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
dashboard_severities = (
|
||||
alert_template.check_dashboard_alert_severity
|
||||
if alert_template.check_dashboard_alert_severity
|
||||
else [
|
||||
or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
AlertSeverity.INFO,
|
||||
]
|
||||
)
|
||||
email_severities = (
|
||||
alert_template.check_email_alert_severity
|
||||
if alert_template.check_email_alert_severity
|
||||
else [AlertSeverity.ERROR, AlertSeverity.WARNING]
|
||||
)
|
||||
text_severities = (
|
||||
alert_template.check_text_alert_severity
|
||||
if alert_template.check_text_alert_severity
|
||||
else [AlertSeverity.ERROR, AlertSeverity.WARNING]
|
||||
)
|
||||
email_severities = alert_template.check_email_alert_severity or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
]
|
||||
text_severities = alert_template.check_text_alert_severity or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
]
|
||||
always_dashboard = alert_template.check_always_alert
|
||||
always_email = alert_template.check_always_email
|
||||
always_text = alert_template.check_always_text
|
||||
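The severity handling above now uses `value or [defaults]` instead of a conditional expression. Worth noting that `or` falls back for both `None` and an empty list, which is exactly the behaviour these alert-template fields rely on; a tiny self-contained illustration:

```python
def effective_severities(configured, defaults):
    # `or` returns `defaults` whenever `configured` is falsy (None or []),
    # matching the refactored alert-template fallbacks above.
    return configured or defaults

assert effective_severities(None, ["error", "warning"]) == ["error", "warning"]
assert effective_severities([], ["error", "warning"]) == ["error", "warning"]
assert effective_severities(["info"], ["error", "warning"]) == ["info"]
```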
@@ -380,21 +376,18 @@ class Alert(models.Model):
|
||||
|
||||
# set alert_template settings
|
||||
if alert_template:
|
||||
dashboard_severities = (
|
||||
alert_template.task_dashboard_alert_severity
|
||||
if alert_template.task_dashboard_alert_severity
|
||||
else [AlertSeverity.ERROR, AlertSeverity.WARNING]
|
||||
)
|
||||
email_severities = (
|
||||
alert_template.task_email_alert_severity
|
||||
if alert_template.task_email_alert_severity
|
||||
else [AlertSeverity.ERROR, AlertSeverity.WARNING]
|
||||
)
|
||||
text_severities = (
|
||||
alert_template.task_text_alert_severity
|
||||
if alert_template.task_text_alert_severity
|
||||
else [AlertSeverity.ERROR, AlertSeverity.WARNING]
|
||||
)
|
||||
dashboard_severities = alert_template.task_dashboard_alert_severity or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
]
|
||||
email_severities = alert_template.task_email_alert_severity or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
]
|
||||
text_severities = alert_template.task_text_alert_severity or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
]
|
||||
always_dashboard = alert_template.task_always_alert
|
||||
always_email = alert_template.task_always_email
|
||||
always_text = alert_template.task_always_text
|
||||
@@ -417,7 +410,6 @@ class Alert(models.Model):
|
||||
|
||||
# create alert in dashboard if enabled
|
||||
if dashboard_alert or always_dashboard:
|
||||
|
||||
# check if alert template is set and specific severities are configured
|
||||
if (
|
||||
not alert_template
|
||||
@@ -430,7 +422,6 @@ class Alert(models.Model):
|
||||
|
||||
# send email if enabled
|
||||
if email_alert or always_email:
|
||||
|
||||
# check if alert template is set and specific severities are configured
|
||||
if (
|
||||
not alert_template
|
||||
@@ -445,7 +436,6 @@ class Alert(models.Model):
|
||||
|
||||
# send text if enabled
|
||||
if text_alert or always_text:
|
||||
|
||||
# check if alert template is set and specific severities are configured
|
||||
if (
|
||||
not alert_template
|
||||
@@ -462,14 +452,22 @@ class Alert(models.Model):
|
||||
and run_script_action
|
||||
and not alert.action_run
|
||||
):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type=AgentHistoryType.SCRIPT_RUN,
|
||||
script=alert_template.action,
|
||||
username="alert-action-failure",
|
||||
)
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.action.pk,
|
||||
args=alert.parse_script_args(alert_template.action_args),
|
||||
timeout=alert_template.action_timeout,
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
run_as_user=False,
|
||||
env_vars=alert_template.action_env_vars,
|
||||
)
|
||||
|
||||
# command was successful
|
||||
@@ -491,7 +489,7 @@ class Alert(models.Model):
|
||||
def handle_alert_resolve(
|
||||
cls, instance: Union[Agent, TaskResult, CheckResult]
|
||||
) -> None:
|
||||
from agents.models import Agent
|
||||
from agents.models import Agent, AgentHistory
|
||||
from autotasks.models import TaskResult
|
||||
from checks.models import CheckResult
|
||||
|
||||
@@ -585,14 +583,22 @@ class Alert(models.Model):
|
||||
and run_script_action
|
||||
and not alert.resolved_action_run
|
||||
):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type=AgentHistoryType.SCRIPT_RUN,
|
||||
script=alert_template.action,
|
||||
username="alert-action-resolved",
|
||||
)
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.resolved_action.pk,
|
||||
args=alert.parse_script_args(alert_template.resolved_action_args),
|
||||
timeout=alert_template.resolved_action_timeout,
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
run_as_user=False,
|
||||
env_vars=alert_template.resolved_action_env_vars,
|
||||
)
|
||||
|
||||
# command was successful
|
||||
@@ -613,11 +619,10 @@ class Alert(models.Model):
|
||||
)
|
||||
|
||||
def parse_script_args(self, args: List[str]) -> List[str]:
|
||||
|
||||
if not args:
|
||||
return []
|
||||
|
||||
temp_args = list()
|
||||
temp_args = []
|
||||
# pattern to match for injection
|
||||
pattern = re.compile(".*\\{\\{alert\\.(.*)\\}\\}.*")
|
||||
|
||||
@@ -661,6 +666,12 @@ class AlertTemplate(BaseAuditModel):
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
action_env_vars = ArrayField(
|
||||
models.TextField(null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
action_timeout = models.PositiveIntegerField(default=15)
|
||||
resolved_action = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
@@ -675,6 +686,12 @@ class AlertTemplate(BaseAuditModel):
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
resolved_action_env_vars = ArrayField(
|
||||
models.TextField(null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
resolved_action_timeout = models.PositiveIntegerField(default=15)
|
||||
|
||||
# overrides the global recipients
|
||||
|
||||
@@ -32,7 +32,7 @@ def _has_perm_on_alert(user: "User", id: int) -> bool:
|
||||
|
||||
class AlertPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
if r.method == "GET" or r.method == "PATCH":
|
||||
if r.method in ("GET", "PATCH"):
|
||||
if "pk" in view.kwargs.keys():
|
||||
return _has_perm(r, "can_list_alerts") and _has_perm_on_alert(
|
||||
r.user, view.kwargs["pk"]
|
||||
@@ -52,5 +52,5 @@ class AlertTemplatePerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
if r.method == "GET":
|
||||
return _has_perm(r, "can_list_alerttemplates")
|
||||
else:
|
||||
return _has_perm(r, "can_manage_alerttemplates")
|
||||
|
||||
return _has_perm(r, "can_manage_alerttemplates")
|
||||
|
||||
@@ -8,7 +8,6 @@ from .models import Alert, AlertTemplate
|
||||
|
||||
|
||||
class AlertSerializer(ModelSerializer):
|
||||
|
||||
hostname = ReadOnlyField(source="assigned_agent.hostname")
|
||||
agent_id = ReadOnlyField(source="assigned_agent.agent_id")
|
||||
client = ReadOnlyField(source="client.name")
|
||||
|
||||
@@ -8,7 +8,7 @@ from model_bakery import baker, seq
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from autotasks.models import TaskResult
|
||||
from core.tasks import cache_db_fields_task, handle_resolved_stuff
|
||||
from core.tasks import cache_db_fields_task, resolve_alerts_task
|
||||
from core.utils import get_core_settings
|
||||
from tacticalrmm.constants import AgentMonType, AlertSeverity, AlertType, CheckStatus
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
@@ -389,7 +389,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
)
|
||||
|
||||
def test_agent_gets_correct_alert_template(self):
|
||||
|
||||
core = get_core_settings()
|
||||
# setup data
|
||||
workstation = baker.make_recipe(
|
||||
@@ -677,8 +676,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
agent_template_email = Agent.objects.get(pk=agent_template_email.pk)
|
||||
|
||||
# have the two agents checkin
|
||||
url = "/api/v3/checkin/"
|
||||
|
||||
agent_template_text.version = settings.LATEST_AGENT_VER
|
||||
agent_template_text.last_seen = djangotime.now()
|
||||
agent_template_text.save()
|
||||
@@ -688,7 +685,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
agent_template_email.save()
|
||||
|
||||
cache_db_fields_task()
|
||||
handle_resolved_stuff()
|
||||
resolve_alerts_task()
|
||||
|
||||
recovery_sms.assert_called_with(
|
||||
pk=Alert.objects.get(agent=agent_template_text).pk
|
||||
@@ -1373,7 +1370,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
def test_alert_actions(
|
||||
self, recovery_sms, recovery_email, outage_email, outage_sms, nats_cmd
|
||||
):
|
||||
|
||||
from agents.models import AgentHistory
|
||||
from agents.tasks import agent_outages_task
|
||||
|
||||
# Setup cmd mock
|
||||
@@ -1399,9 +1396,12 @@ class TestAlertTasks(TacticalTestCase):
|
||||
agent_script_actions=False,
|
||||
action=failure_action,
|
||||
action_timeout=30,
|
||||
action_args=["hello", "world"],
|
||||
action_env_vars=["hello=world", "foo=bar"],
|
||||
resolved_action=resolved_action,
|
||||
resolved_action_timeout=35,
|
||||
resolved_action_args=["nice_arg"],
|
||||
resolved_action_env_vars=["resolved=action", "env=vars"],
|
||||
)
|
||||
agent.client.alert_template = alert_template
|
||||
agent.client.save()
|
||||
@@ -1422,9 +1422,11 @@ class TestAlertTasks(TacticalTestCase):
|
||||
data = {
|
||||
"func": "runscriptfull",
|
||||
"timeout": 30,
|
||||
"script_args": [],
|
||||
"script_args": ["hello", "world"],
|
||||
"payload": {"code": failure_action.code, "shell": failure_action.shell},
|
||||
"run_as_user": False,
|
||||
"env_vars": ["hello=world", "foo=bar"],
|
||||
"id": AgentHistory.objects.last().pk, # type: ignore
|
||||
}
|
||||
|
||||
nats_cmd.assert_called_with(data, timeout=30, wait=True)
|
||||
@@ -1445,7 +1447,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
agent.save()
|
||||
|
||||
cache_db_fields_task()
|
||||
handle_resolved_stuff()
|
||||
resolve_alerts_task()
|
||||
|
||||
# this is what data should be
|
||||
data = {
|
||||
@@ -1454,6 +1456,8 @@ class TestAlertTasks(TacticalTestCase):
|
||||
"script_args": ["nice_arg"],
|
||||
"payload": {"code": resolved_action.code, "shell": resolved_action.shell},
|
||||
"run_as_user": False,
|
||||
"env_vars": ["resolved=action", "env=vars"],
|
||||
"id": AgentHistory.objects.last().pk, # type: ignore
|
||||
}
|
||||
|
||||
nats_cmd.assert_called_with(data, timeout=35, wait=True)
|
||||
@@ -1627,8 +1631,7 @@ class TestAlertPermissions(TacticalTestCase):
|
||||
unauthorized_task_url,
|
||||
]
|
||||
|
||||
for method in ["get", "put", "delete"]:
|
||||
|
||||
for method in ("get", "put", "delete"):
|
||||
# test superuser access
|
||||
for url in authorized_urls:
|
||||
self.check_authorized_superuser(method, url)
|
||||
|
||||
@@ -23,7 +23,6 @@ class GetAddAlerts(APIView):
|
||||
permission_classes = [IsAuthenticated, AlertPerms]
|
||||
|
||||
def patch(self, request):
|
||||
|
||||
# top 10 alerts for dashboard icon
|
||||
if "top" in request.data.keys():
|
||||
alerts = Alert.objects.filter(
|
||||
@@ -41,13 +40,13 @@ class GetAddAlerts(APIView):
|
||||
|
||||
elif any(
|
||||
key
|
||||
in [
|
||||
in (
|
||||
"timeFilter",
|
||||
"clientFilter",
|
||||
"severityFilter",
|
||||
"resolvedFilter",
|
||||
"snoozedFilter",
|
||||
]
|
||||
)
|
||||
for key in request.data.keys()
|
||||
):
|
||||
clientFilter = Q()
|
||||
|
||||
@@ -77,9 +77,7 @@ class TestAPIv3(TacticalTestCase):
|
||||
)
|
||||
|
||||
# add check to agent with check interval set
|
||||
check = baker.make_recipe(
|
||||
"checks.ping_check", agent=self.agent, run_interval=30
|
||||
)
|
||||
baker.make_recipe("checks.ping_check", agent=self.agent, run_interval=30)
|
||||
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
@@ -89,7 +87,7 @@ class TestAPIv3(TacticalTestCase):
|
||||
)
|
||||
|
||||
# minimum check run interval is 15 seconds
|
||||
check = baker.make_recipe("checks.ping_check", agent=self.agent, run_interval=5)
|
||||
baker.make_recipe("checks.ping_check", agent=self.agent, run_interval=5)
|
||||
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
@@ -129,8 +127,15 @@ class TestAPIv3(TacticalTestCase):
|
||||
"script": script.id,
|
||||
"script_args": ["test"],
|
||||
"timeout": 30,
|
||||
"env_vars": ["hello=world", "foo=bar"],
|
||||
},
|
||||
{
|
||||
"type": "script",
|
||||
"script": 3,
|
||||
"script_args": [],
|
||||
"timeout": 30,
|
||||
"env_vars": ["hello=world", "foo=bar"],
|
||||
},
|
||||
{"type": "script", "script": 3, "script_args": [], "timeout": 30},
|
||||
]
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
@@ -296,3 +301,9 @@ class TestAPIv3(TacticalTestCase):
|
||||
AgentCustomField.objects.get(field=multiple, agent=task.agent).value,
|
||||
["this"],
|
||||
)
|
||||
|
||||
def test_get_agent_config(self):
|
||||
agent = baker.make_recipe("agents.online_agent")
|
||||
url = f"/api/v3/{agent.agent_id}/config/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@@ -19,4 +19,5 @@ urlpatterns = [
path("superseded/", views.SupersededWinUpdate.as_view()),
path("<int:pk>/chocoresult/", views.ChocoResult.as_view()),
path("<int:pk>/<str:agentid>/histresult/", views.AgentHistoryResult.as_view()),
path("<str:agentid>/config/", views.AgentConfig.as_view()),
]
api/tacticalrmm/apiv3/utils.py | 25 lines (new file)
@@ -0,0 +1,25 @@
import random

from django.conf import settings

from tacticalrmm.structs import AgentCheckInConfig


def get_agent_config() -> AgentCheckInConfig:
return AgentCheckInConfig(
checkin_hello=random.randint(*getattr(settings, "CHECKIN_HELLO", (30, 60))),
checkin_agentinfo=random.randint(
*getattr(settings, "CHECKIN_AGENTINFO", (200, 400))
),
checkin_winsvc=random.randint(
*getattr(settings, "CHECKIN_WINSVC", (2400, 3000))
),
checkin_pubip=random.randint(*getattr(settings, "CHECKIN_PUBIP", (300, 500))),
checkin_disks=random.randint(*getattr(settings, "CHECKIN_DISKS", (1000, 2000))),
checkin_sw=random.randint(*getattr(settings, "CHECKIN_SW", (2800, 3500))),
checkin_wmi=random.randint(*getattr(settings, "CHECKIN_WMI", (3000, 4000))),
checkin_syncmesh=random.randint(
*getattr(settings, "CHECKIN_SYNCMESH", (800, 1200))
),
limit_data=getattr(settings, "LIMIT_DATA", False),
)
|
||||
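Every interval in the new `get_agent_config()` is read with `getattr(settings, NAME, default)`, so each range can be tuned from local settings without touching this file. An illustrative override block (the names mirror the defaults above; the units and semantics of each tuple are interpreted by the agent and are assumed here):

```python
# local_settings.py (illustrative)
CHECKIN_HELLO = (20, 40)        # override only the ranges you care about;
CHECKIN_SYNCMESH = (600, 900)   # anything omitted keeps the hard-coded default
LIMIT_DATA = True
```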
@@ -14,6 +14,7 @@ from rest_framework.views import APIView
|
||||
from accounts.models import User
|
||||
from agents.models import Agent, AgentHistory
|
||||
from agents.serializers import AgentHistorySerializer
|
||||
from apiv3.utils import get_agent_config
|
||||
from autotasks.models import AutomatedTask, TaskResult
|
||||
from autotasks.serializers import TaskGOGetSerializer, TaskResultSerializer
|
||||
from checks.constants import CHECK_DEFER, CHECK_RESULT_DEFER
|
||||
@@ -24,6 +25,7 @@ from core.utils import (
|
||||
get_core_settings,
|
||||
get_mesh_device_id,
|
||||
get_mesh_ws_url,
|
||||
get_meshagent_url,
|
||||
)
|
||||
from logs.models import DebugLog, PendingAction
|
||||
from software.models import InstalledSoftware
|
||||
@@ -45,7 +47,6 @@ from winupdate.models import WinUpdate, WinUpdatePolicy
|
||||
|
||||
|
||||
class CheckIn(APIView):
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
@@ -253,9 +254,7 @@ class CheckRunner(APIView):
|
||||
check.check_result.last_run
|
||||
< djangotime.now()
|
||||
- djangotime.timedelta(
|
||||
seconds=check.run_interval
|
||||
if check.run_interval
|
||||
else agent.check_interval
|
||||
seconds=check.run_interval or agent.check_interval
|
||||
)
|
||||
)
|
||||
]
|
||||
@@ -365,7 +364,6 @@ class TaskRunner(APIView):
|
||||
# check if task is a collector and update the custom field
|
||||
if task.custom_field:
|
||||
if not task_result.stderr:
|
||||
|
||||
task_result.save_collector_results()
|
||||
|
||||
status = CheckStatus.PASSING
|
||||
@@ -398,26 +396,33 @@ class MeshExe(APIView):
|
||||
def post(self, request):
|
||||
match request.data:
|
||||
case {"goarch": GoArch.AMD64, "plat": AgentPlat.WINDOWS}:
|
||||
arch = MeshAgentIdent.WIN64
|
||||
ident = MeshAgentIdent.WIN64
|
||||
case {"goarch": GoArch.i386, "plat": AgentPlat.WINDOWS}:
|
||||
arch = MeshAgentIdent.WIN32
|
||||
ident = MeshAgentIdent.WIN32
|
||||
case {"goarch": GoArch.AMD64, "plat": AgentPlat.DARWIN} | {
|
||||
"goarch": GoArch.ARM64,
|
||||
"plat": AgentPlat.DARWIN,
|
||||
}:
|
||||
ident = MeshAgentIdent.DARWIN_UNIVERSAL
|
||||
case _:
|
||||
return notify_error("Arch not specified")
|
||||
return notify_error("Arch not supported")
|
||||
|
||||
core = get_core_settings()
|
||||
|
||||
try:
|
||||
uri = get_mesh_ws_url()
|
||||
mesh_id = asyncio.run(get_mesh_device_id(uri, core.mesh_device_group))
|
||||
mesh_device_id: str = asyncio.run(
|
||||
get_mesh_device_id(uri, core.mesh_device_group)
|
||||
)
|
||||
except:
|
||||
return notify_error("Unable to connect to mesh to get group id information")
|
||||
|
||||
if settings.DOCKER_BUILD:
|
||||
dl_url = f"{settings.MESH_WS_URL.replace('ws://', 'http://')}/meshagents?id={arch}&meshid={mesh_id}&installflags=0"
|
||||
else:
|
||||
dl_url = (
|
||||
f"{core.mesh_site}/meshagents?id={arch}&meshid={mesh_id}&installflags=0"
|
||||
)
|
||||
dl_url = get_meshagent_url(
|
||||
ident=ident,
|
||||
plat=request.data["plat"],
|
||||
mesh_site=core.mesh_site,
|
||||
mesh_device_id=mesh_device_id,
|
||||
)
|
||||
|
||||
try:
|
||||
return download_mesh_agent(dl_url)
|
||||
@@ -508,7 +513,7 @@ class Installer(APIView):
|
||||
ver = request.data["version"]
|
||||
if (
|
||||
pyver.parse(ver) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||
and not "-dev" in settings.LATEST_AGENT_VER
|
||||
and "-dev" not in settings.LATEST_AGENT_VER
|
||||
):
|
||||
return notify_error(
|
||||
f"Old installer detected (version {ver} ). Latest version is {settings.LATEST_AGENT_VER} Please generate a new installer from the RMM"
|
||||
@@ -561,3 +566,12 @@ class AgentHistoryResult(APIView):
        s.is_valid(raise_exception=True)
        s.save()
        return Response("ok")


class AgentConfig(APIView):
    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, agentid):
        ret = get_agent_config()
        return Response(ret._to_dict())

@@ -216,16 +216,15 @@ class Policy(BaseAuditModel):
|
||||
|
||||
@staticmethod
|
||||
def get_policy_tasks(agent: "Agent") -> "List[AutomatedTask]":
|
||||
|
||||
# List of all tasks to be applied
|
||||
tasks = list()
|
||||
tasks = []
|
||||
|
||||
# Get policies applied to agent and agent site and client
|
||||
policies = agent.get_agent_policies()
|
||||
|
||||
processed_policies = list()
|
||||
processed_policies = []
|
||||
|
||||
for _, policy in policies.items():
|
||||
for policy in policies.values():
|
||||
if policy and policy.active and policy.pk not in processed_policies:
|
||||
processed_policies.append(policy.pk)
|
||||
for task in policy.autotasks.all():
|
||||
@@ -235,7 +234,6 @@ class Policy(BaseAuditModel):
|
||||
|
||||
@staticmethod
|
||||
def get_policy_checks(agent: "Agent") -> "List[Check]":
|
||||
|
||||
# Get checks added to agent directly
|
||||
agent_checks = list(agent.agentchecks.all())
|
||||
|
||||
@@ -244,12 +242,12 @@ class Policy(BaseAuditModel):
|
||||
|
||||
# Used to hold the policies that will be applied and the order in which they are applied
|
||||
# Enforced policies are applied first
|
||||
enforced_checks = list()
|
||||
policy_checks = list()
|
||||
enforced_checks = []
|
||||
policy_checks = []
|
||||
|
||||
processed_policies = list()
|
||||
processed_policies = []
|
||||
|
||||
for _, policy in policies.items():
|
||||
for policy in policies.values():
|
||||
if policy and policy.active and policy.pk not in processed_policies:
|
||||
processed_policies.append(policy.pk)
|
||||
if policy.enforced:
|
||||
@@ -263,24 +261,24 @@ class Policy(BaseAuditModel):
|
||||
return []
|
||||
|
||||
# Sorted Checks already added
|
||||
added_diskspace_checks: List[str] = list()
|
||||
added_ping_checks: List[str] = list()
|
||||
added_winsvc_checks: List[str] = list()
|
||||
added_script_checks: List[int] = list()
|
||||
added_eventlog_checks: List[List[str]] = list()
|
||||
added_cpuload_checks: List[int] = list()
|
||||
added_memory_checks: List[int] = list()
|
||||
added_diskspace_checks: List[str] = []
|
||||
added_ping_checks: List[str] = []
|
||||
added_winsvc_checks: List[str] = []
|
||||
added_script_checks: List[int] = []
|
||||
added_eventlog_checks: List[List[str]] = []
|
||||
added_cpuload_checks: List[int] = []
|
||||
added_memory_checks: List[int] = []
|
||||
|
||||
# Lists all agent and policy checks that will be returned
|
||||
diskspace_checks: "List[Check]" = list()
|
||||
ping_checks: "List[Check]" = list()
|
||||
winsvc_checks: "List[Check]" = list()
|
||||
script_checks: "List[Check]" = list()
|
||||
eventlog_checks: "List[Check]" = list()
|
||||
cpuload_checks: "List[Check]" = list()
|
||||
memory_checks: "List[Check]" = list()
|
||||
diskspace_checks: "List[Check]" = []
|
||||
ping_checks: "List[Check]" = []
|
||||
winsvc_checks: "List[Check]" = []
|
||||
script_checks: "List[Check]" = []
|
||||
eventlog_checks: "List[Check]" = []
|
||||
cpuload_checks: "List[Check]" = []
|
||||
memory_checks: "List[Check]" = []
|
||||
|
||||
overridden_checks: List[int] = list()
|
||||
overridden_checks: List[int] = []
|
||||
|
||||
# Loop over checks in with enforced policies first, then non-enforced policies
|
||||
for check in enforced_checks + agent_checks + policy_checks:
|
||||
|
||||
@@ -7,5 +7,5 @@ class AutomationPolicyPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
if r.method == "GET":
|
||||
return _has_perm(r, "can_list_automation_policies")
|
||||
else:
|
||||
return _has_perm(r, "can_manage_automation_policies")
|
||||
|
||||
return _has_perm(r, "can_manage_automation_policies")
|
||||
|
||||
@@ -87,7 +87,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
"copyId": policy.pk,
|
||||
}
|
||||
|
||||
resp = self.client.post(f"/automation/policies/", data, format="json")
|
||||
resp = self.client.post("/automation/policies/", data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
copied_policy = Policy.objects.get(name=data["name"])
|
||||
@@ -221,7 +221,6 @@ class TestPolicyViews(TacticalTestCase):
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_policy_task_status(self):
|
||||
|
||||
# policy with a task
|
||||
policy = baker.make("automation.Policy")
|
||||
agent = baker.make_recipe("agents.agent", policy=policy)
|
||||
@@ -240,7 +239,6 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
@patch("automation.tasks.run_win_policy_autotasks_task.delay")
|
||||
def test_run_win_task(self, mock_task):
|
||||
|
||||
policy = baker.make("automation.Policy")
|
||||
# create managed policy tasks
|
||||
task = baker.make_recipe("autotasks.task", policy=policy)
|
||||
@@ -283,7 +281,6 @@ class TestPolicyViews(TacticalTestCase):
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_update_patch_policy(self):
|
||||
|
||||
# test policy doesn't exist
|
||||
resp = self.client.put("/automation/patchpolicy/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
@@ -396,7 +393,6 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_policy_related(self):
|
||||
|
||||
# Get Site and Client from an agent in list
|
||||
clients = baker.make("clients.Client", _quantity=5)
|
||||
sites = baker.make("clients.Site", client=cycle(clients), _quantity=25)
|
||||
@@ -447,7 +443,6 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
self.assertEqual(len(resp.data["agents"]), 2)
|
||||
|
||||
def test_getting_agent_policy_checks(self):
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
self.create_checks(parent=policy)
|
||||
@@ -536,7 +531,6 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
)
|
||||
|
||||
def test_policy_exclusions(self):
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
baker.make_recipe("checks.memory_check", policy=policy)
|
||||
|
||||
@@ -84,7 +84,6 @@ class GetUpdateDeletePolicy(APIView):
|
||||
|
||||
|
||||
class PolicyAutoTask(APIView):
|
||||
|
||||
# get status of all tasks
|
||||
def get(self, request, task):
|
||||
tasks = TaskResult.objects.filter(task=task)
|
||||
@@ -108,7 +107,6 @@ class PolicyCheck(APIView):
|
||||
|
||||
class OverviewPolicy(APIView):
|
||||
def get(self, request):
|
||||
|
||||
clients = (
|
||||
Client.objects.filter_by_role(request.user)
|
||||
.select_related("workstation_policy", "server_policy")
|
||||
@@ -127,7 +125,6 @@ class OverviewPolicy(APIView):
|
||||
|
||||
class GetRelated(APIView):
|
||||
def get(self, request, pk):
|
||||
|
||||
policy = (
|
||||
Policy.objects.filter(pk=pk)
|
||||
.prefetch_related(
|
||||
@@ -146,6 +143,7 @@ class GetRelated(APIView):
|
||||
|
||||
class UpdatePatchPolicy(APIView):
|
||||
permission_classes = [IsAuthenticated, AutomationPolicyPerms]
|
||||
|
||||
# create new patch policy
|
||||
def post(self, request):
|
||||
policy = get_object_or_404(Policy, pk=request.data["policy"])
|
||||
@@ -179,7 +177,6 @@ class UpdatePatchPolicy(APIView):
|
||||
class ResetPatchPolicy(APIView):
|
||||
# bulk reset agent patch policy
|
||||
def post(self, request):
|
||||
|
||||
if "client" in request.data:
|
||||
if not _has_perm_on_client(request.user, request.data["client"]):
|
||||
raise PermissionDenied()
|
||||
|
||||
@@ -0,0 +1,33 @@
from django.db import migrations


def migrate_env_vars(apps, schema_editor):
    AutomatedTask = apps.get_model("autotasks", "AutomatedTask")
    for task in AutomatedTask.objects.iterator(chunk_size=30):
        try:
            tmp = []
            if isinstance(task.actions, list) and task.actions:
                for t in task.actions:
                    if isinstance(t, dict):
                        if t["type"] == "script":
                            try:
                                t["env_vars"]
                            except KeyError:
                                t["env_vars"] = []
                        tmp.append(t)
            if tmp:
                task.actions = tmp
                task.save(update_fields=["actions"])
        except Exception as e:
            print(f"ERROR: {e}")


class Migration(migrations.Migration):

    dependencies = [
        ("autotasks", "0037_alter_taskresult_retcode"),
    ]

    operations = [
        migrations.RunPython(migrate_env_vars),
    ]

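Note: the data migration above backfills an empty env_vars list on existing script actions so that older tasks match the new action shape used elsewhere in this changeset. With illustrative values, a stored actions entry changes like this:

```python
# Illustrative values only: effect of migrate_env_vars on an existing script action.
before = [{"type": "script", "script": 3, "script_args": [], "timeout": 30}]
after = [
    {"type": "script", "script": 3, "script_args": [], "timeout": 30, "env_vars": []}
]
```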
@@ -1,6 +1,7 @@
|
||||
import asyncio
|
||||
import random
|
||||
import string
|
||||
from contextlib import suppress
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
|
||||
|
||||
import pytz
|
||||
@@ -70,7 +71,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
|
||||
# format -> [{"type": "script", "script": 1, "name": "Script Name", "timeout": 90, "script_args": []}, {"type": "cmd", "command": "whoami", "timeout": 90}]
|
||||
# format -> [{"type": "script", "script": 1, "name": "Script Name", "timeout": 90, "script_args": [], "env_vars": []}, {"type": "cmd", "command": "whoami", "timeout": 90}]
|
||||
actions = JSONField(default=list)
|
||||
assigned_check = models.ForeignKey(
|
||||
"checks.Check",
|
||||
@@ -141,7 +142,6 @@ class AutomatedTask(BaseAuditModel):
|
||||
return self.name
|
||||
|
||||
def save(self, *args, **kwargs) -> None:
|
||||
|
||||
# if task is a policy task clear cache on everything
|
||||
if self.policy:
|
||||
cache.delete_many_pattern("site_*_tasks")
|
||||
@@ -167,7 +167,6 @@ class AutomatedTask(BaseAuditModel):
|
||||
)
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
|
||||
# if task is a policy task clear cache on everything
|
||||
if self.policy:
|
||||
cache.delete_many_pattern("site_*_tasks")
|
||||
@@ -225,15 +224,13 @@ class AutomatedTask(BaseAuditModel):
|
||||
def create_policy_task(
|
||||
self, policy: "Policy", assigned_check: "Optional[Check]" = None
|
||||
) -> None:
|
||||
### Copies certain properties on this task (self) to a new task and sets it to the supplied Policy
|
||||
fields_to_copy = POLICY_TASK_FIELDS_TO_COPY
|
||||
|
||||
# Copies certain properties on this task (self) to a new task and sets it to the supplied Policy
|
||||
task = AutomatedTask.objects.create(
|
||||
policy=policy,
|
||||
assigned_check=assigned_check,
|
||||
)
|
||||
|
||||
for field in fields_to_copy:
|
||||
for field in POLICY_TASK_FIELDS_TO_COPY:
|
||||
setattr(task, field, getattr(self, field))
|
||||
|
||||
task.save()
|
||||
@@ -251,9 +248,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
"trigger": self.task_type
|
||||
if self.task_type != TaskType.CHECK_FAILURE
|
||||
else TaskType.MANUAL,
|
||||
"multiple_instances": self.task_instance_policy
|
||||
if self.task_instance_policy
|
||||
else 0,
|
||||
"multiple_instances": self.task_instance_policy or 0,
|
||||
"delete_expired_task_after": self.remove_if_not_scheduled
|
||||
if self.expire_date
|
||||
else False,
|
||||
@@ -262,13 +257,13 @@ class AutomatedTask(BaseAuditModel):
|
||||
else True,
|
||||
}
|
||||
|
||||
if self.task_type in [
|
||||
if self.task_type in (
|
||||
TaskType.RUN_ONCE,
|
||||
TaskType.DAILY,
|
||||
TaskType.WEEKLY,
|
||||
TaskType.MONTHLY,
|
||||
TaskType.MONTHLY_DOW,
|
||||
]:
|
||||
):
|
||||
# set runonce task in future if creating and run_asap_after_missed is set
|
||||
if (
|
||||
not editing
|
||||
@@ -315,7 +310,6 @@ class AutomatedTask(BaseAuditModel):
|
||||
task["days_of_week"] = self.run_time_bit_weekdays
|
||||
|
||||
elif self.task_type == TaskType.MONTHLY:
|
||||
|
||||
# check if "last day is configured"
|
||||
if self.monthly_days_of_month >= 0x80000000:
|
||||
task["days_of_month"] = self.monthly_days_of_month - 0x80000000
|
||||
@@ -432,10 +426,8 @@ class AutomatedTask(BaseAuditModel):
|
||||
if r != "ok" and "The system cannot find the file specified" not in r:
|
||||
task_result.sync_status = TaskSyncStatus.PENDING_DELETION
|
||||
|
||||
try:
|
||||
with suppress(DatabaseError):
|
||||
task_result.save(update_fields=["sync_status"])
|
||||
except DatabaseError:
|
||||
pass
|
||||
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
@@ -532,7 +524,6 @@ class TaskResult(models.Model):
|
||||
)
|
||||
|
||||
def save_collector_results(self) -> None:
|
||||
|
||||
agent_field = self.task.custom_field.get_or_create_field_value(self.agent)
|
||||
|
||||
value = (
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
from datetime import datetime
|
||||
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework import serializers
|
||||
|
||||
from scripts.models import Script
|
||||
@@ -14,7 +17,6 @@ class TaskResultSerializer(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class TaskSerializer(serializers.ModelSerializer):
|
||||
|
||||
check_name = serializers.ReadOnlyField(source="assigned_check.readable_desc")
|
||||
schedule = serializers.ReadOnlyField()
|
||||
alert_template = serializers.SerializerMethodField()
|
||||
@@ -30,52 +32,49 @@ class TaskSerializer(serializers.ModelSerializer):
|
||||
)
|
||||
|
||||
def validate_actions(self, value):
|
||||
|
||||
if not value:
|
||||
raise serializers.ValidationError(
|
||||
f"There must be at least one action configured"
|
||||
"There must be at least one action configured"
|
||||
)
|
||||
|
||||
for action in value:
|
||||
if "type" not in action:
|
||||
raise serializers.ValidationError(
|
||||
f"Each action must have a type field of either 'script' or 'cmd'"
|
||||
"Each action must have a type field of either 'script' or 'cmd'"
|
||||
)
|
||||
|
||||
if action["type"] == "script":
|
||||
if "script" not in action:
|
||||
raise serializers.ValidationError(
|
||||
f"A script action type must have a 'script' field with primary key of script"
|
||||
"A script action type must have a 'script' field with primary key of script"
|
||||
)
|
||||
|
||||
if "script_args" not in action:
|
||||
raise serializers.ValidationError(
|
||||
f"A script action type must have a 'script_args' field with an array of arguments"
|
||||
"A script action type must have a 'script_args' field with an array of arguments"
|
||||
)
|
||||
|
||||
if "timeout" not in action:
|
||||
raise serializers.ValidationError(
|
||||
f"A script action type must have a 'timeout' field"
|
||||
"A script action type must have a 'timeout' field"
|
||||
)
|
||||
|
||||
if action["type"] == "cmd":
|
||||
if "command" not in action:
|
||||
raise serializers.ValidationError(
|
||||
f"A command action type must have a 'command' field"
|
||||
"A command action type must have a 'command' field"
|
||||
)
|
||||
|
||||
if "timeout" not in action:
|
||||
raise serializers.ValidationError(
|
||||
f"A command action type must have a 'timeout' field"
|
||||
"A command action type must have a 'timeout' field"
|
||||
)
|
||||
|
||||
return value
|
||||
|
||||
def validate(self, data):
|
||||
|
||||
# allow editing with task_type not specified
|
||||
if self.instance and "task_type" not in data:
|
||||
|
||||
# remove schedule related fields from data
|
||||
if "run_time_date" in data:
|
||||
del data["run_time_date"]
|
||||
@@ -97,16 +96,23 @@ class TaskSerializer(serializers.ModelSerializer):
|
||||
del data["assigned_check"]
|
||||
return data
|
||||
|
||||
if (
|
||||
"expire_date" in data
|
||||
and isinstance(data["expire_date"], datetime)
|
||||
and djangotime.now() > data["expire_date"]
|
||||
):
|
||||
raise serializers.ValidationError("Expires date/time is in the past")
|
||||
|
||||
# run_time_date required
|
||||
if (
|
||||
data["task_type"]
|
||||
in [
|
||||
in (
|
||||
TaskType.RUN_ONCE,
|
||||
TaskType.DAILY,
|
||||
TaskType.WEEKLY,
|
||||
TaskType.MONTHLY,
|
||||
TaskType.MONTHLY_DOW,
|
||||
]
|
||||
)
|
||||
and not data["run_time_date"]
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
@@ -180,7 +186,6 @@ class TaskSerializer(serializers.ModelSerializer):
|
||||
return data
|
||||
|
||||
def get_alert_template(self, obj):
|
||||
|
||||
if obj.agent:
|
||||
alert_template = obj.agent.alert_template
|
||||
else:
|
||||
@@ -188,13 +193,12 @@ class TaskSerializer(serializers.ModelSerializer):
|
||||
|
||||
if not alert_template:
|
||||
return None
|
||||
else:
|
||||
return {
|
||||
"name": alert_template.name,
|
||||
"always_email": alert_template.task_always_email,
|
||||
"always_text": alert_template.task_always_text,
|
||||
"always_alert": alert_template.task_always_alert,
|
||||
}
|
||||
return {
|
||||
"name": alert_template.name,
|
||||
"always_email": alert_template.task_always_email,
|
||||
"always_text": alert_template.task_always_text,
|
||||
"always_alert": alert_template.task_always_alert,
|
||||
}
|
||||
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
@@ -229,6 +233,12 @@ class TaskGOGetSerializer(serializers.ModelSerializer):
|
||||
# script doesn't exist so remove it
|
||||
actions_to_remove.append(action["script"])
|
||||
continue
|
||||
# wrote a custom migration for env_vars but leaving this just in case.
|
||||
# can be removed later
|
||||
try:
|
||||
env_vars = action["env_vars"]
|
||||
except KeyError:
|
||||
env_vars = []
|
||||
tmp.append(
|
||||
{
|
||||
"type": "script",
|
||||
@@ -242,6 +252,7 @@ class TaskGOGetSerializer(serializers.ModelSerializer):
|
||||
"shell": script.shell,
|
||||
"timeout": action["timeout"],
|
||||
"run_as_user": script.run_as_user,
|
||||
"env_vars": env_vars,
|
||||
}
|
||||
)
|
||||
if actions_to_remove:
|
||||
|
||||
@@ -1,134 +1,162 @@
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
import random
|
||||
from collections import namedtuple
|
||||
from contextlib import suppress
|
||||
from time import sleep
|
||||
from typing import Optional, Union
|
||||
from typing import TYPE_CHECKING, Optional, Union
|
||||
|
||||
import msgpack
|
||||
import nats
|
||||
from django.utils import timezone as djangotime
|
||||
from nats.errors import TimeoutError
|
||||
|
||||
from agents.models import Agent
|
||||
from alerts.models import Alert
|
||||
from autotasks.models import AutomatedTask, TaskResult
|
||||
from logs.models import DebugLog
|
||||
from tacticalrmm.celery import app
|
||||
from tacticalrmm.constants import DebugLogType
|
||||
from tacticalrmm.constants import AGENT_STATUS_ONLINE, ORPHANED_WIN_TASK_LOCK
|
||||
from tacticalrmm.helpers import rand_range, setup_nats_options
|
||||
from tacticalrmm.utils import redis_lock
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from nats.aio.client import Client as NATSClient
|
||||
|
||||
|
||||
@app.task
|
||||
def create_win_task_schedule(pk: int, agent_id: Optional[str] = None) -> str:
|
||||
try:
|
||||
with suppress(
|
||||
AutomatedTask.DoesNotExist,
|
||||
Agent.DoesNotExist,
|
||||
):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
if agent_id:
|
||||
task.create_task_on_agent(Agent.objects.get(agent_id=agent_id))
|
||||
else:
|
||||
task.create_task_on_agent()
|
||||
except (AutomatedTask.DoesNotExist, Agent.DoesNotExist):
|
||||
pass
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def modify_win_task(pk: int, agent_id: Optional[str] = None) -> str:
|
||||
try:
|
||||
with suppress(
|
||||
AutomatedTask.DoesNotExist,
|
||||
Agent.DoesNotExist,
|
||||
):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
if agent_id:
|
||||
task.modify_task_on_agent(Agent.objects.get(agent_id=agent_id))
|
||||
else:
|
||||
task.modify_task_on_agent()
|
||||
except (AutomatedTask.DoesNotExist, Agent.DoesNotExist):
|
||||
pass
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def delete_win_task_schedule(pk: int, agent_id: Optional[str] = None) -> str:
|
||||
try:
|
||||
with suppress(
|
||||
AutomatedTask.DoesNotExist,
|
||||
Agent.DoesNotExist,
|
||||
):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
if agent_id:
|
||||
task.delete_task_on_agent(Agent.objects.get(agent_id=agent_id))
|
||||
else:
|
||||
task.delete_task_on_agent()
|
||||
except (AutomatedTask.DoesNotExist, Agent.DoesNotExist):
|
||||
pass
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def run_win_task(pk: int, agent_id: Optional[str] = None) -> str:
|
||||
try:
|
||||
with suppress(
|
||||
AutomatedTask.DoesNotExist,
|
||||
Agent.DoesNotExist,
|
||||
):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
if agent_id:
|
||||
task.run_win_task(Agent.objects.get(agent_id=agent_id))
|
||||
else:
|
||||
task.run_win_task()
|
||||
except (AutomatedTask.DoesNotExist, Agent.DoesNotExist):
|
||||
pass
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def remove_orphaned_win_tasks() -> None:
|
||||
from agents.models import Agent
|
||||
@app.task(bind=True)
|
||||
def remove_orphaned_win_tasks(self) -> str:
|
||||
with redis_lock(ORPHANED_WIN_TASK_LOCK, self.app.oid) as acquired:
|
||||
if not acquired:
|
||||
return f"{self.app.oid} still running"
|
||||
|
||||
for agent in Agent.online_agents():
|
||||
r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10))
|
||||
from core.tasks import _get_agent_qs
|
||||
|
||||
if not isinstance(r, list): # empty list
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"Unable to pull list of scheduled tasks on {agent.hostname}: {r}",
|
||||
)
|
||||
continue
|
||||
AgentTup = namedtuple("AgentTup", ["agent_id", "task_names"])
|
||||
items: "list[AgentTup]" = []
|
||||
exclude_tasks = ("TacticalRMM_SchedReboot",)
|
||||
|
||||
agent_task_names = [
|
||||
task.win_task_name for task in agent.get_tasks_with_policies()
|
||||
]
|
||||
for agent in _get_agent_qs():
|
||||
if agent.status == AGENT_STATUS_ONLINE:
|
||||
names = [task.win_task_name for task in agent.get_tasks_with_policies()]
|
||||
items.append(AgentTup._make([agent.agent_id, names]))
|
||||
|
||||
exclude_tasks = (
|
||||
"TacticalRMM_fixmesh",
|
||||
"TacticalRMM_SchedReboot",
|
||||
"TacticalRMM_sync",
|
||||
"TacticalRMM_agentupdate",
|
||||
)
|
||||
async def _handle_task(nc: "NATSClient", sub, data, names) -> str:
|
||||
try:
|
||||
msg = await nc.request(
|
||||
subject=sub, payload=msgpack.dumps(data), timeout=5
|
||||
)
|
||||
except TimeoutError:
|
||||
return "timeout"
|
||||
|
||||
for task in r:
|
||||
if task.startswith(exclude_tasks):
|
||||
# skip system tasks or any pending reboots
|
||||
continue
|
||||
try:
|
||||
r = msgpack.loads(msg.data)
|
||||
except Exception as e:
|
||||
return str(e)
|
||||
|
||||
if task.startswith("TacticalRMM_") and task not in agent_task_names:
|
||||
# delete task since it doesn't exist in UI
|
||||
nats_data = {
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": task},
|
||||
}
|
||||
ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
if ret != "ok":
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}",
|
||||
)
|
||||
else:
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"Removed orphaned task {task} from {agent.hostname}",
|
||||
)
|
||||
if not isinstance(r, list):
|
||||
return "notlist"
|
||||
|
||||
for name in r:
|
||||
if name.startswith(exclude_tasks):
|
||||
# skip system tasks or any pending reboots
|
||||
continue
|
||||
|
||||
if name.startswith("TacticalRMM_") and name not in names:
|
||||
nats_data = {
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": name},
|
||||
}
|
||||
print(f"Deleting orphaned task: {name} on agent {sub}")
|
||||
await nc.publish(subject=sub, payload=msgpack.dumps(nats_data))
|
||||
|
||||
return "ok"
|
||||
|
||||
async def _run() -> None:
|
||||
opts = setup_nats_options()
|
||||
try:
|
||||
nc = await nats.connect(**opts)
|
||||
except Exception as e:
|
||||
return str(e)
|
||||
|
||||
payload = {"func": "listschedtasks"}
|
||||
tasks = [
|
||||
_handle_task(
|
||||
nc=nc, sub=item.agent_id, data=payload, names=item.task_names
|
||||
)
|
||||
for item in items
|
||||
]
|
||||
await asyncio.gather(*tasks)
|
||||
await nc.close()
|
||||
|
||||
asyncio.run(_run())
|
||||
return "completed"
|
||||
|
||||
|
||||
@app.task
|
||||
def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None) -> str:
|
||||
|
||||
try:
|
||||
alert = Alert.objects.get(pk=pk)
|
||||
except Alert.DoesNotExist:
|
||||
@@ -139,7 +167,7 @@ def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None)
|
||||
task_result = TaskResult.objects.get(
|
||||
task=alert.assigned_task, agent=alert.agent
|
||||
)
|
||||
sleep(random.randint(1, 5))
|
||||
sleep(rand_range(100, 1500))
|
||||
task_result.send_email()
|
||||
alert.email_sent = djangotime.now()
|
||||
alert.save(update_fields=["email_sent"])
|
||||
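Note: throughout these alert tasks the fixed sleep(random.randint(1, 5)) calls are replaced with sleep(rand_range(100, 1500)) from tacticalrmm.helpers. The helper is not part of this diff; the arguments suggest a millisecond range returned as fractional seconds, roughly like the sketch below (assumption, not the project's actual code):

```python
# Assumed behaviour of tacticalrmm.helpers.rand_range, inferred from its call
# sites in this diff; the real helper may differ.
import random


def rand_range(min_ms: int, max_ms: int) -> float:
    # random millisecond value expressed in seconds, suitable for time.sleep()
    return random.randint(min_ms, max_ms) / 1000
```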
@@ -151,7 +179,7 @@ def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None)
|
||||
task_result = TaskResult.objects.get(
|
||||
task=alert.assigned_task, agent=alert.agent
|
||||
)
|
||||
sleep(random.randint(1, 5))
|
||||
sleep(rand_range(100, 1500))
|
||||
task_result.send_email()
|
||||
alert.email_sent = djangotime.now()
|
||||
alert.save(update_fields=["email_sent"])
|
||||
@@ -161,7 +189,6 @@ def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None)
|
||||
|
||||
@app.task
|
||||
def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) -> str:
|
||||
|
||||
try:
|
||||
alert = Alert.objects.get(pk=pk)
|
||||
except Alert.DoesNotExist:
|
||||
@@ -172,7 +199,7 @@ def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) ->
|
||||
task_result = TaskResult.objects.get(
|
||||
task=alert.assigned_task, agent=alert.agent
|
||||
)
|
||||
sleep(random.randint(1, 3))
|
||||
sleep(rand_range(100, 1500))
|
||||
task_result.send_sms()
|
||||
alert.sms_sent = djangotime.now()
|
||||
alert.save(update_fields=["sms_sent"])
|
||||
@@ -184,7 +211,7 @@ def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) ->
|
||||
task_result = TaskResult.objects.get(
|
||||
task=alert.assigned_task, agent=alert.agent
|
||||
)
|
||||
sleep(random.randint(1, 3))
|
||||
sleep(rand_range(100, 1500))
|
||||
task_result.send_sms()
|
||||
alert.sms_sent = djangotime.now()
|
||||
alert.save(update_fields=["sms_sent"])
|
||||
@@ -194,7 +221,6 @@ def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) ->
|
||||
|
||||
@app.task
|
||||
def handle_resolved_task_sms_alert(pk: int) -> str:
|
||||
|
||||
try:
|
||||
alert = Alert.objects.get(pk=pk)
|
||||
except Alert.DoesNotExist:
|
||||
@@ -205,7 +231,7 @@ def handle_resolved_task_sms_alert(pk: int) -> str:
|
||||
task_result = TaskResult.objects.get(
|
||||
task=alert.assigned_task, agent=alert.agent
|
||||
)
|
||||
sleep(random.randint(1, 3))
|
||||
sleep(rand_range(100, 1500))
|
||||
task_result.send_resolved_sms()
|
||||
alert.resolved_sms_sent = djangotime.now()
|
||||
alert.save(update_fields=["resolved_sms_sent"])
|
||||
@@ -215,7 +241,6 @@ def handle_resolved_task_sms_alert(pk: int) -> str:
|
||||
|
||||
@app.task
|
||||
def handle_resolved_task_email_alert(pk: int) -> str:
|
||||
|
||||
try:
|
||||
alert = Alert.objects.get(pk=pk)
|
||||
except Alert.DoesNotExist:
|
||||
@@ -226,7 +251,7 @@ def handle_resolved_task_email_alert(pk: int) -> str:
|
||||
task_result = TaskResult.objects.get(
|
||||
task=alert.assigned_task, agent=alert.agent
|
||||
)
|
||||
sleep(random.randint(1, 5))
|
||||
sleep(rand_range(100, 1500))
|
||||
task_result.send_resolved_email()
|
||||
alert.resolved_email_sent = djangotime.now()
|
||||
alert.save(update_fields=["resolved_email_sent"])
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from unittest.mock import call, patch
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker
|
||||
@@ -8,7 +8,7 @@ from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import AutomatedTask, TaskResult, TaskSyncStatus
|
||||
from .serializers import TaskSerializer
|
||||
from .tasks import create_win_task_schedule, remove_orphaned_win_tasks, run_win_task
|
||||
from .tasks import create_win_task_schedule, run_win_task
|
||||
|
||||
base_url = "/tasks"
|
||||
|
||||
@@ -51,7 +51,7 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
# setup data
|
||||
script = baker.make_recipe("scripts.script")
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
policy = baker.make("automation.Policy")
|
||||
policy = baker.make("automation.Policy") # noqa
|
||||
check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
custom_field = baker.make("core.CustomField")
|
||||
|
||||
@@ -137,7 +137,7 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
"weekly_interval": 2,
|
||||
"run_time_bit_weekdays": 26,
|
||||
"run_time_date": djangotime.now(),
|
||||
"expire_date": djangotime.now(),
|
||||
"expire_date": djangotime.now() + djangotime.timedelta(weeks=5),
|
||||
"repetition_interval": "30S",
|
||||
"repetition_duration": "1H",
|
||||
"random_task_delay": "5M",
|
||||
@@ -160,7 +160,7 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
"monthly_months_of_year": 56,
|
||||
"monthly_days_of_month": 350,
|
||||
"run_time_date": djangotime.now(),
|
||||
"expire_date": djangotime.now(),
|
||||
"expire_date": djangotime.now() + djangotime.timedelta(weeks=5),
|
||||
"repetition_interval": "30S",
|
||||
"repetition_duration": "1H",
|
||||
"random_task_delay": "5M",
|
||||
@@ -183,7 +183,7 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
"monthly_weeks_of_month": 4,
|
||||
"run_time_bit_weekdays": 15,
|
||||
"run_time_date": djangotime.now(),
|
||||
"expire_date": djangotime.now(),
|
||||
"expire_date": djangotime.now() + djangotime.timedelta(weeks=5),
|
||||
"repetition_interval": "30S",
|
||||
"repetition_duration": "1H",
|
||||
"random_task_delay": "5M",
|
||||
@@ -206,7 +206,7 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
"monthly_weeks_of_month": 4,
|
||||
"run_time_bit_weekdays": 15,
|
||||
"run_time_date": djangotime.now(),
|
||||
"expire_date": djangotime.now(),
|
||||
"expire_date": djangotime.now() + djangotime.timedelta(weeks=5),
|
||||
"repetition_interval": "30S",
|
||||
"repetition_duration": "1H",
|
||||
"random_task_delay": "5M",
|
||||
@@ -238,7 +238,6 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_get_autotask(self):
|
||||
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
@@ -258,7 +257,9 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent_task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
policy = baker.make("automation.Policy")
|
||||
policy_task = baker.make("autotasks.AutomatedTask", enabled=True, policy=policy)
|
||||
policy_task = baker.make( # noqa
|
||||
"autotasks.AutomatedTask", enabled=True, policy=policy
|
||||
)
|
||||
custom_field = baker.make("core.CustomField")
|
||||
script = baker.make("scripts.Script")
|
||||
|
||||
@@ -294,7 +295,7 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
"monthly_weeks_of_month": 4,
|
||||
"run_time_bit_weekdays": 15,
|
||||
"run_time_date": djangotime.now(),
|
||||
"expire_date": djangotime.now(),
|
||||
"expire_date": djangotime.now() + djangotime.timedelta(weeks=5),
|
||||
"repetition_interval": "30S",
|
||||
"repetition_duration": "1H",
|
||||
"random_task_delay": "5M",
|
||||
@@ -314,7 +315,7 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
"monthly_weeks_of_month": 4,
|
||||
"run_time_bit_weekdays": 15,
|
||||
"run_time_date": djangotime.now(),
|
||||
"expire_date": djangotime.now(),
|
||||
"expire_date": djangotime.now() + djangotime.timedelta(weeks=5),
|
||||
"repetition_interval": "30S",
|
||||
"repetition_duration": "1H",
|
||||
"random_task_delay": "5M",
|
||||
@@ -381,60 +382,6 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_remove_orphaned_win_task(self, nats_cmd):
|
||||
agent = baker.make_recipe("agents.online_agent")
|
||||
baker.make_recipe("agents.offline_agent")
|
||||
task1 = AutomatedTask.objects.create(
|
||||
agent=agent,
|
||||
name="test task 1",
|
||||
)
|
||||
|
||||
# test removing an orphaned task
|
||||
win_tasks = [
|
||||
"Adobe Acrobat Update Task",
|
||||
"AdobeGCInvoker-1.0",
|
||||
"GoogleUpdateTaskMachineCore",
|
||||
"GoogleUpdateTaskMachineUA",
|
||||
"OneDrive Standalone Update Task-S-1-5-21-717461175-241712648-1206041384-1001",
|
||||
task1.win_task_name,
|
||||
"TacticalRMM_fixmesh",
|
||||
"TacticalRMM_SchedReboot_jk324kajd",
|
||||
"TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb", # orphaned task
|
||||
]
|
||||
|
||||
calls = [
|
||||
call({"func": "listschedtasks"}, timeout=10),
|
||||
call(
|
||||
{
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {
|
||||
"name": "TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb"
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
),
|
||||
]
|
||||
|
||||
nats_cmd.side_effect = [win_tasks, "ok"]
|
||||
remove_orphaned_win_tasks()
|
||||
self.assertEqual(nats_cmd.call_count, 2)
|
||||
nats_cmd.assert_has_calls(calls)
|
||||
|
||||
# test nats delete task fail
|
||||
nats_cmd.reset_mock()
|
||||
nats_cmd.side_effect = [win_tasks, "error deleting task"]
|
||||
remove_orphaned_win_tasks()
|
||||
nats_cmd.assert_has_calls(calls)
|
||||
self.assertEqual(nats_cmd.call_count, 2)
|
||||
|
||||
# no orphaned tasks
|
||||
nats_cmd.reset_mock()
|
||||
win_tasks.remove("TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb")
|
||||
nats_cmd.side_effect = [win_tasks, "ok"]
|
||||
remove_orphaned_win_tasks()
|
||||
self.assertEqual(nats_cmd.call_count, 1)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_run_win_task(self, nats_cmd):
|
||||
self.agent = baker.make_recipe("agents.agent")
|
||||
@@ -766,12 +713,14 @@ class TestTaskPermissions(TacticalTestCase):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
policy = baker.make("automation.Policy")
|
||||
unauthorized_agent = baker.make_recipe("agents.agent")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent, _quantity=5)
|
||||
unauthorized_task = baker.make(
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent, _quantity=5) # noqa
|
||||
unauthorized_task = baker.make( # noqa
|
||||
"autotasks.AutomatedTask", agent=unauthorized_agent, _quantity=7
|
||||
)
|
||||
|
||||
policy_tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=2)
|
||||
policy_tasks = baker.make( # noqa
|
||||
"autotasks.AutomatedTask", policy=policy, _quantity=2
|
||||
)
|
||||
|
||||
# test super user access
|
||||
self.check_authorized_superuser("get", f"{base_url}/")
|
||||
@@ -864,7 +813,7 @@ class TestTaskPermissions(TacticalTestCase):
|
||||
|
||||
url = f"{base_url}/"
|
||||
|
||||
for data in [policy_data, agent_data]:
|
||||
for data in (policy_data, agent_data):
|
||||
# test superuser access
|
||||
self.check_authorized_superuser("post", url, data)
|
||||
|
||||
@@ -900,8 +849,7 @@ class TestTaskPermissions(TacticalTestCase):
|
||||
)
|
||||
policy_task = baker.make("autotasks.AutomatedTask", policy=policy)
|
||||
|
||||
for method in ["get", "put", "delete"]:
|
||||
|
||||
for method in ("get", "put", "delete"):
|
||||
url = f"{base_url}/{task.id}/"
|
||||
unauthorized_url = f"{base_url}/{unauthorized_task.id}/"
|
||||
policy_url = f"{base_url}/{policy_task.id}/"
|
||||
@@ -939,7 +887,6 @@ class TestTaskPermissions(TacticalTestCase):
|
||||
self.check_authorized(method, policy_url)
|
||||
|
||||
def test_task_action_permissions(self):
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
unauthorized_agent = baker.make_recipe("agents.agent")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
|
||||
@@ -18,7 +18,6 @@ class GetAddAutoTasks(APIView):
|
||||
permission_classes = [IsAuthenticated, AutoTaskPerms]
|
||||
|
||||
def get(self, request, agent_id=None, policy=None):
|
||||
|
||||
if agent_id:
|
||||
agent = get_object_or_404(Agent, agent_id=agent_id)
|
||||
tasks = agent.get_tasks_with_policies()
|
||||
@@ -59,7 +58,6 @@ class GetEditDeleteAutoTask(APIView):
|
||||
permission_classes = [IsAuthenticated, AutoTaskPerms]
|
||||
|
||||
def get(self, request, pk):
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
if task.agent and not _has_perm_on_agent(request.user, task.agent.agent_id):
|
||||
@@ -68,7 +66,6 @@ class GetEditDeleteAutoTask(APIView):
|
||||
return Response(TaskSerializer(task).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
if task.agent and not _has_perm_on_agent(request.user, task.agent.agent_id):
|
||||
|
||||
25
api/tacticalrmm/checks/migrations/0031_check_env_vars.py
Normal file
@@ -0,0 +1,25 @@
# Generated by Django 4.1.3 on 2022-12-03 09:38

import django.contrib.postgres.fields
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("checks", "0030_alter_checkresult_retcode"),
    ]

    operations = [
        migrations.AddField(
            model_name="check",
            name="env_vars",
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.TextField(blank=True, null=True),
                blank=True,
                default=list,
                null=True,
                size=None,
            ),
        ),
    ]

@@ -98,6 +98,12 @@ class Check(BaseAuditModel):
        blank=True,
        default=list,
    )
    env_vars = ArrayField(
        models.TextField(null=True, blank=True),
        null=True,
        blank=True,
        default=list,
    )
    info_return_codes = ArrayField(
        models.PositiveIntegerField(),
        null=True,
@@ -149,11 +155,10 @@ class Check(BaseAuditModel):
|
||||
def __str__(self):
|
||||
if self.agent:
|
||||
return f"{self.agent.hostname} - {self.readable_desc}"
|
||||
else:
|
||||
return f"{self.policy.name} - {self.readable_desc}"
|
||||
|
||||
return f"{self.policy.name} - {self.readable_desc}"
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
|
||||
# if check is a policy check clear cache on everything
|
||||
if self.policy:
|
||||
cache.delete_many_pattern("site_*_checks")
|
||||
@@ -169,7 +174,6 @@ class Check(BaseAuditModel):
|
||||
)
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
|
||||
# if check is a policy check clear cache on everything
|
||||
if self.policy:
|
||||
cache.delete_many_pattern("site_*_checks")
|
||||
@@ -188,7 +192,6 @@ class Check(BaseAuditModel):
|
||||
def readable_desc(self):
|
||||
display = self.get_check_type_display() # type: ignore
|
||||
if self.check_type == CheckType.DISK_SPACE:
|
||||
|
||||
text = ""
|
||||
if self.warning_threshold:
|
||||
text += f" Warning Threshold: {self.warning_threshold}%"
|
||||
@@ -198,10 +201,7 @@ class Check(BaseAuditModel):
|
||||
return f"{display}: Drive {self.disk} - {text}"
|
||||
elif self.check_type == CheckType.PING:
|
||||
return f"{display}: {self.name}"
|
||||
elif (
|
||||
self.check_type == CheckType.CPU_LOAD or self.check_type == CheckType.MEMORY
|
||||
):
|
||||
|
||||
elif self.check_type in (CheckType.CPU_LOAD, CheckType.MEMORY):
|
||||
text = ""
|
||||
if self.warning_threshold:
|
||||
text += f" Warning Threshold: {self.warning_threshold}%"
|
||||
@@ -215,17 +215,14 @@ class Check(BaseAuditModel):
|
||||
return f"{display}: {self.name}"
|
||||
elif self.check_type == CheckType.SCRIPT:
|
||||
return f"{display}: {self.script.name}"
|
||||
else:
|
||||
return "n/a"
|
||||
|
||||
return "n/a"
|
||||
|
||||
@staticmethod
|
||||
def non_editable_fields() -> list[str]:
|
||||
return CHECKS_NON_EDITABLE_FIELDS
|
||||
|
||||
def create_policy_check(self, policy: "Policy") -> None:
|
||||
|
||||
fields_to_copy = POLICY_CHECK_FIELDS_TO_COPY
|
||||
|
||||
check = Check.objects.create(
|
||||
policy=policy,
|
||||
)
|
||||
@@ -233,13 +230,12 @@ class Check(BaseAuditModel):
|
||||
for task in self.assignedtasks.all(): # type: ignore
|
||||
task.create_policy_task(policy=policy, assigned_check=check)
|
||||
|
||||
for field in fields_to_copy:
|
||||
for field in POLICY_CHECK_FIELDS_TO_COPY:
|
||||
setattr(check, field, getattr(self, field))
|
||||
|
||||
check.save()
|
||||
|
||||
def should_create_alert(self, alert_template=None):
|
||||
|
||||
return (
|
||||
self.dashboard_alert
|
||||
or self.email_alert
|
||||
@@ -333,14 +329,13 @@ class CheckResult(models.Model):
|
||||
return f"{self.agent.hostname} - {self.assigned_check}"
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
|
||||
# if check is a policy check clear cache on everything
|
||||
if not self.alert_severity and self.assigned_check.check_type in [
|
||||
if not self.alert_severity and self.assigned_check.check_type in (
|
||||
CheckType.MEMORY,
|
||||
CheckType.CPU_LOAD,
|
||||
CheckType.DISK_SPACE,
|
||||
CheckType.SCRIPT,
|
||||
]:
|
||||
):
|
||||
self.alert_severity = AlertSeverity.WARNING
|
||||
|
||||
super(CheckResult, self).save(
|
||||
@@ -371,7 +366,6 @@ class CheckResult(models.Model):
|
||||
update_fields = []
|
||||
# cpuload or mem checks
|
||||
if check.check_type in (CheckType.CPU_LOAD, CheckType.MEMORY):
|
||||
|
||||
self.history.append(data["percent"])
|
||||
|
||||
if len(self.history) > 15:
|
||||
@@ -536,7 +530,6 @@ class CheckResult(models.Model):
|
||||
return self.status
|
||||
|
||||
def send_email(self):
|
||||
|
||||
CORE = get_core_settings()
|
||||
|
||||
body: str = ""
|
||||
@@ -565,14 +558,12 @@ class CheckResult(models.Model):
|
||||
body = subject + f" - Disk {self.assigned_check.disk} does not exist"
|
||||
|
||||
elif self.assigned_check.check_type == CheckType.SCRIPT:
|
||||
|
||||
body = (
|
||||
subject
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
elif self.assigned_check.check_type == CheckType.PING:
|
||||
|
||||
body = self.more_info
|
||||
|
||||
elif self.assigned_check.check_type in (CheckType.CPU_LOAD, CheckType.MEMORY):
|
||||
@@ -594,7 +585,6 @@ class CheckResult(models.Model):
|
||||
body = subject + f" - Status: {self.more_info}"
|
||||
|
||||
elif self.assigned_check.check_type == CheckType.EVENT_LOG:
|
||||
|
||||
if self.assigned_check.event_source and self.assigned_check.event_message:
|
||||
start = f"Event ID {self.assigned_check.event_id}, source {self.assigned_check.event_source}, containing string {self.assigned_check.event_message} "
|
||||
elif self.assigned_check.event_source:
|
||||
@@ -616,7 +606,6 @@ class CheckResult(models.Model):
|
||||
CORE.send_mail(subject, body, alert_template=self.agent.alert_template)
|
||||
|
||||
def send_sms(self):
|
||||
|
||||
CORE = get_core_settings()
|
||||
body: str = ""
|
||||
|
||||
|
||||
@@ -1,11 +1,16 @@
|
||||
from rest_framework import permissions
|
||||
|
||||
from tacticalrmm.permissions import _has_perm, _has_perm_on_agent
|
||||
from tacticalrmm.permissions import (
|
||||
_has_perm,
|
||||
_has_perm_on_agent,
|
||||
_has_perm_on_client,
|
||||
_has_perm_on_site,
|
||||
)
|
||||
|
||||
|
||||
class ChecksPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
if r.method == "GET" or r.method == "PATCH":
|
||||
if r.method in ("GET", "PATCH"):
|
||||
if "agent_id" in view.kwargs.keys():
|
||||
return _has_perm(r, "can_list_checks") and _has_perm_on_agent(
|
||||
r.user, view.kwargs["agent_id"]
|
||||
@@ -21,3 +26,17 @@ class RunChecksPerms(permissions.BasePermission):
        return _has_perm(r, "can_run_checks") and _has_perm_on_agent(
            r.user, view.kwargs["agent_id"]
        )


class BulkRunChecksPerms(permissions.BasePermission):
    def has_permission(self, r, view) -> bool:
        if not _has_perm(r, "can_run_checks"):
            return False

        if view.kwargs["target"] == "client":
            return _has_perm_on_client(user=r.user, client_id=view.kwargs["pk"])

        elif view.kwargs["target"] == "site":
            return _has_perm_on_site(user=r.user, site_id=view.kwargs["pk"])

        return False

@@ -22,7 +22,6 @@ class CheckResultSerializer(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class CheckSerializer(serializers.ModelSerializer):
|
||||
|
||||
readable_desc = serializers.ReadOnlyField()
|
||||
assignedtasks = AssignedTaskField(many=True, read_only=True)
|
||||
alert_template = serializers.SerializerMethodField()
|
||||
@@ -43,13 +42,13 @@ class CheckSerializer(serializers.ModelSerializer):
|
||||
|
||||
if not alert_template:
|
||||
return None
|
||||
else:
|
||||
return {
|
||||
"name": alert_template.name,
|
||||
"always_email": alert_template.check_always_email,
|
||||
"always_text": alert_template.check_always_text,
|
||||
"always_alert": alert_template.check_always_alert,
|
||||
}
|
||||
|
||||
return {
|
||||
"name": alert_template.name,
|
||||
"always_email": alert_template.check_always_email,
|
||||
"always_text": alert_template.check_always_text,
|
||||
"always_alert": alert_template.check_always_alert,
|
||||
}
|
||||
|
||||
class Meta:
|
||||
model = Check
|
||||
@@ -82,7 +81,7 @@ class CheckSerializer(serializers.ModelSerializer):
|
||||
|
||||
if not val["warning_threshold"] and not val["error_threshold"]:
|
||||
raise serializers.ValidationError(
|
||||
f"Warning threshold or Error Threshold must be set"
|
||||
"Warning threshold or Error Threshold must be set"
|
||||
)
|
||||
|
||||
if (
|
||||
@@ -91,7 +90,7 @@ class CheckSerializer(serializers.ModelSerializer):
|
||||
and val["error_threshold"] > 0
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
f"Warning threshold must be greater than Error Threshold"
|
||||
"Warning threshold must be greater than Error Threshold"
|
||||
)
|
||||
|
||||
# ping checks
|
||||
@@ -113,7 +112,7 @@ class CheckSerializer(serializers.ModelSerializer):
|
||||
|
||||
if not val["warning_threshold"] and not val["error_threshold"]:
|
||||
raise serializers.ValidationError(
|
||||
f"Warning threshold or Error Threshold must be set"
|
||||
"Warning threshold or Error Threshold must be set"
|
||||
)
|
||||
|
||||
if (
|
||||
@@ -122,7 +121,7 @@ class CheckSerializer(serializers.ModelSerializer):
|
||||
and val["error_threshold"] > 0
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
f"Warning threshold must be less than Error Threshold"
|
||||
"Warning threshold must be less than Error Threshold"
|
||||
)
|
||||
|
||||
if check_type == CheckType.MEMORY and not self.instance:
|
||||
@@ -133,7 +132,7 @@ class CheckSerializer(serializers.ModelSerializer):
|
||||
|
||||
if not val["warning_threshold"] and not val["error_threshold"]:
|
||||
raise serializers.ValidationError(
|
||||
f"Warning threshold or Error Threshold must be set"
|
||||
"Warning threshold or Error Threshold must be set"
|
||||
)
|
||||
|
||||
if (
|
||||
@@ -142,7 +141,7 @@ class CheckSerializer(serializers.ModelSerializer):
|
||||
and val["error_threshold"] > 0
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
f"Warning threshold must be less than Error Threshold"
|
||||
"Warning threshold must be less than Error Threshold"
|
||||
)
|
||||
|
||||
return val
|
||||
@@ -158,6 +157,7 @@ class CheckRunnerGetSerializer(serializers.ModelSerializer):
    # only send data needed for agent to run a check
    script = ScriptCheckSerializer(read_only=True)
    script_args = serializers.SerializerMethodField()
    env_vars = serializers.SerializerMethodField()

    def get_script_args(self, obj):
        if obj.check_type != CheckType.SCRIPT:
@@ -168,6 +168,13 @@ class CheckRunnerGetSerializer(serializers.ModelSerializer):
            agent=agent, shell=obj.script.shell, args=obj.script_args
        )

    def get_env_vars(self, obj):
        if obj.check_type != CheckType.SCRIPT:
            return []

        # check's env_vars override the script's env vars
        return obj.env_vars or obj.script.env_vars

    class Meta:
        model = Check
        exclude = [

@@ -1,5 +1,4 @@
|
||||
import datetime as dt
|
||||
import random
|
||||
from time import sleep
|
||||
from typing import Optional
|
||||
|
||||
@@ -8,6 +7,7 @@ from django.utils import timezone as djangotime
|
||||
from alerts.models import Alert
|
||||
from checks.models import CheckResult
|
||||
from tacticalrmm.celery import app
|
||||
from tacticalrmm.helpers import rand_range
|
||||
|
||||
|
||||
@app.task
|
||||
@@ -24,7 +24,7 @@ def handle_check_email_alert_task(
|
||||
check_result = CheckResult.objects.get(
|
||||
assigned_check=alert.assigned_check, agent=alert.agent
|
||||
)
|
||||
sleep(random.randint(1, 5))
|
||||
sleep(rand_range(100, 1500))
|
||||
check_result.send_email()
|
||||
alert.email_sent = djangotime.now()
|
||||
alert.save(update_fields=["email_sent"])
|
||||
@@ -36,7 +36,7 @@ def handle_check_email_alert_task(
|
||||
check_result = CheckResult.objects.get(
|
||||
assigned_check=alert.assigned_check, agent=alert.agent
|
||||
)
|
||||
sleep(random.randint(1, 5))
|
||||
sleep(rand_range(100, 1500))
|
||||
check_result.send_email()
|
||||
alert.email_sent = djangotime.now()
|
||||
alert.save(update_fields=["email_sent"])
|
||||
@@ -46,7 +46,6 @@ def handle_check_email_alert_task(
|
||||
|
||||
@app.task
|
||||
def handle_check_sms_alert_task(pk: int, alert_interval: Optional[float] = None) -> str:
|
||||
|
||||
try:
|
||||
alert = Alert.objects.get(pk=pk)
|
||||
except Alert.DoesNotExist:
|
||||
@@ -57,7 +56,7 @@ def handle_check_sms_alert_task(pk: int, alert_interval: Optional[float] = None)
|
||||
check_result = CheckResult.objects.get(
|
||||
assigned_check=alert.assigned_check, agent=alert.agent
|
||||
)
|
||||
sleep(random.randint(1, 3))
|
||||
sleep(rand_range(100, 1500))
|
||||
check_result.send_sms()
|
||||
alert.sms_sent = djangotime.now()
|
||||
alert.save(update_fields=["sms_sent"])
|
||||
@@ -69,7 +68,7 @@ def handle_check_sms_alert_task(pk: int, alert_interval: Optional[float] = None)
|
||||
check_result = CheckResult.objects.get(
|
||||
assigned_check=alert.assigned_check, agent=alert.agent
|
||||
)
|
||||
sleep(random.randint(1, 3))
|
||||
sleep(rand_range(100, 1500))
|
||||
check_result.send_sms()
|
||||
alert.sms_sent = djangotime.now()
|
||||
alert.save(update_fields=["sms_sent"])
|
||||
@@ -79,7 +78,6 @@ def handle_check_sms_alert_task(pk: int, alert_interval: Optional[float] = None)
|
||||
|
||||
@app.task
|
||||
def handle_resolved_check_sms_alert_task(pk: int) -> str:
|
||||
|
||||
try:
|
||||
alert = Alert.objects.get(pk=pk)
|
||||
except Alert.DoesNotExist:
|
||||
@@ -90,7 +88,7 @@ def handle_resolved_check_sms_alert_task(pk: int) -> str:
|
||||
check_result = CheckResult.objects.get(
|
||||
assigned_check=alert.assigned_check, agent=alert.agent
|
||||
)
|
||||
sleep(random.randint(1, 3))
|
||||
sleep(rand_range(100, 1500))
|
||||
check_result.send_resolved_sms()
|
||||
alert.resolved_sms_sent = djangotime.now()
|
||||
alert.save(update_fields=["resolved_sms_sent"])
|
||||
@@ -100,7 +98,6 @@ def handle_resolved_check_sms_alert_task(pk: int) -> str:
|
||||
|
||||
@app.task
|
||||
def handle_resolved_check_email_alert_task(pk: int) -> str:
|
||||
|
||||
try:
|
||||
alert = Alert.objects.get(pk=pk)
|
||||
except Alert.DoesNotExist:
|
||||
@@ -111,7 +108,7 @@ def handle_resolved_check_email_alert_task(pk: int) -> str:
|
||||
check_result = CheckResult.objects.get(
|
||||
assigned_check=alert.assigned_check, agent=alert.agent
|
||||
)
|
||||
sleep(random.randint(1, 5))
|
||||
sleep(rand_range(100, 1500))
|
||||
check_result.send_resolved_email()
|
||||
alert.resolved_email_sent = djangotime.now()
|
||||
alert.save(update_fields=["resolved_email_sent"])
|
||||
|
||||
@@ -41,7 +41,7 @@ class TestCheckViews(TacticalTestCase):
|
||||
self.assertEqual(len(resp.data), 4)
|
||||
|
||||
# test agent doesn't exist
|
||||
url = f"/agents/jh3498uf8fkh4ro8hfd8df98/checks/"
|
||||
url = "/agents/jh3498uf8fkh4ro8hfd8df98/checks/"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
@@ -101,8 +101,7 @@ class TestCheckViews(TacticalTestCase):
|
||||
"fails_b4_alert": 3,
|
||||
}
|
||||
|
||||
for payload in [agent_payload, policy_payload]:
|
||||
|
||||
for payload in (agent_payload, policy_payload):
|
||||
# add valid check
|
||||
resp = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -148,8 +147,7 @@ class TestCheckViews(TacticalTestCase):
|
||||
"fails_b4_alert": 9,
|
||||
}
|
||||
|
||||
for payload in [agent_payload, policy_payload]:
|
||||
|
||||
for payload in (agent_payload, policy_payload):
|
||||
# add cpu check
|
||||
resp = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -195,8 +193,7 @@ class TestCheckViews(TacticalTestCase):
|
||||
"fails_b4_alert": 1,
|
||||
}
|
||||
|
||||
for payload in [agent_payload, policy_payload]:
|
||||
|
||||
for payload in (agent_payload, policy_payload):
|
||||
# add memory check
|
||||
resp = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -239,7 +236,6 @@ class TestCheckViews(TacticalTestCase):
|
||||
r = self.client.post(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
|
||||
self.assertEqual(r.json(), f"Checks will now be re-run on {agent.hostname}")
|
||||
|
||||
nats_cmd.reset_mock()
|
||||
nats_cmd.return_value = "timeout"
|
||||
@@ -885,12 +881,12 @@ class TestCheckPermissions(TacticalTestCase):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
policy = baker.make("automation.Policy")
|
||||
unauthorized_agent = baker.make_recipe("agents.agent")
|
||||
check = baker.make("checks.Check", agent=agent, _quantity=5)
|
||||
unauthorized_check = baker.make(
|
||||
check = baker.make("checks.Check", agent=agent, _quantity=5) # noqa
|
||||
unauthorized_check = baker.make( # noqa
|
||||
"checks.Check", agent=unauthorized_agent, _quantity=7
|
||||
)
|
||||
|
||||
policy_checks = baker.make("checks.Check", policy=policy, _quantity=2)
|
||||
policy_checks = baker.make("checks.Check", policy=policy, _quantity=2) # noqa
|
||||
|
||||
# test super user access
|
||||
self.check_authorized_superuser("get", f"{base_url}/")
|
||||
@@ -973,7 +969,7 @@ class TestCheckPermissions(TacticalTestCase):
|
||||
|
||||
url = f"{base_url}/"
|
||||
|
||||
for data in [policy_data, agent_data]:
|
||||
for data in (policy_data, agent_data):
|
||||
# test superuser access
|
||||
self.check_authorized_superuser("post", url, data)
|
||||
|
||||
@@ -1007,8 +1003,7 @@ class TestCheckPermissions(TacticalTestCase):
|
||||
unauthorized_check = baker.make("checks.Check", agent=unauthorized_agent)
|
||||
policy_check = baker.make("checks.Check", policy=policy)
|
||||
|
||||
for method in ["get", "put", "delete"]:
|
||||
|
||||
for method in ("get", "put", "delete"):
|
||||
url = f"{base_url}/{check.id}/"
|
||||
unauthorized_url = f"{base_url}/{unauthorized_check.id}/"
|
||||
policy_url = f"{base_url}/{policy_check.id}/"
|
||||
@@ -1047,7 +1042,6 @@ class TestCheckPermissions(TacticalTestCase):
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_check_action_permissions(self, nats_cmd):
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
unauthorized_agent = baker.make_recipe("agents.agent")
|
||||
check = baker.make("checks.Check", agent=agent)
|
||||
@@ -1061,7 +1055,7 @@ class TestCheckPermissions(TacticalTestCase):
|
||||
assigned_check=unauthorized_check,
|
||||
)
|
||||
|
||||
for action in ["reset", "run"]:
|
||||
for action in ("reset", "run"):
|
||||
if action == "reset":
|
||||
url = f"{base_url}/{check_result.id}/{action}/"
|
||||
unauthorized_url = (
|
||||
|
||||
@@ -8,4 +8,5 @@ urlpatterns = [
path("<int:pk>/reset/", views.ResetCheck.as_view()),
path("<agent:agent_id>/run/", views.run_checks),
path("<int:pk>/history/", views.GetCheckHistory.as_view()),
path("<str:target>/<int:pk>/csbulkrun/", views.bulk_run_checks),
]
|
||||
@@ -1,6 +1,9 @@
|
||||
import asyncio
|
||||
from datetime import datetime as dt
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import msgpack
|
||||
import nats
|
||||
from django.db.models import Q
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
@@ -14,13 +17,16 @@ from agents.models import Agent
|
||||
from alerts.models import Alert
|
||||
from automation.models import Policy
|
||||
from tacticalrmm.constants import CheckStatus, CheckType
|
||||
from tacticalrmm.helpers import notify_error
|
||||
from tacticalrmm.helpers import notify_error, setup_nats_options
|
||||
from tacticalrmm.permissions import _has_perm_on_agent
|
||||
|
||||
from .models import Check, CheckHistory, CheckResult
|
||||
from .permissions import ChecksPerms, RunChecksPerms
|
||||
from .permissions import BulkRunChecksPerms, ChecksPerms, RunChecksPerms
|
||||
from .serializers import CheckHistorySerializer, CheckSerializer
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from nats.aio.client import Client as NATSClient
|
||||
|
||||
|
||||
class GetAddChecks(APIView):
|
||||
permission_classes = [IsAuthenticated, ChecksPerms]
|
||||
@@ -37,7 +43,6 @@ class GetAddChecks(APIView):
|
||||
return Response(CheckSerializer(checks, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
|
||||
data = request.data.copy()
|
||||
# Determine if adding check to Agent and replace agent_id with pk
|
||||
if "agent" in data.keys():
|
||||
@@ -169,6 +174,44 @@ def run_checks(request, agent_id):
if r == "busy":
return notify_error(f"Checks are already running on {agent.hostname}")
elif r == "ok":
return Response(f"Checks will now be re-run on {agent.hostname}")
else:
return notify_error("Unable to contact the agent")
return Response(f"Checks will now be run on {agent.hostname}")

return notify_error("Unable to contact the agent")


@api_view(["POST"])
@permission_classes([IsAuthenticated, BulkRunChecksPerms])
def bulk_run_checks(request, target, pk):
q = Q()
match target:
case "client":
q = Q(site__client__id=pk)
case "site":
q = Q(site__id=pk)

agents = list(
Agent.objects.only("agent_id", "site")
.filter(q)
.values_list("agent_id", flat=True)
)

if not agents:
return notify_error("No agents matched query")

async def _run_check(nc: "NATSClient", sub) -> None:
await nc.publish(subject=sub, payload=msgpack.dumps({"func": "runchecks"}))

async def _run() -> None:
opts = setup_nats_options()
try:
nc = await nats.connect(**opts)
except Exception as e:
return notify_error(str(e))

tasks = [_run_check(nc=nc, sub=agent) for agent in agents]
await asyncio.gather(*tasks)
await nc.close()

asyncio.run(_run())
ret = f"Checks will now be run on {len(agents)} agents"
return Response(ret)
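Note: the new csbulkrun route added in the urls.py hunk above maps to this bulk_run_checks view, so a client-wide or site-wide check run is a single POST. A minimal illustration with Django's test client follows; the /checks/ prefix and the client pk of 1 are assumptions for the example, not values taken from this diff.

# Illustrative only: assumes the checks app is mounted at /checks/ and a client with pk=1 exists.
resp = self.client.post("/checks/client/1/csbulkrun/", format="json")
assert resp.status_code == 200  # body reads "Checks will now be run on N agents"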
@@ -229,16 +229,16 @@ class ClientCustomField(models.Model):
|
||||
return self.multiple_value
|
||||
elif self.field.type == CustomFieldType.CHECKBOX:
|
||||
return self.bool_value
|
||||
else:
|
||||
return self.string_value
|
||||
|
||||
return self.string_value
|
||||
|
||||
def save_to_field(self, value):
|
||||
if self.field.type in [
|
||||
if self.field.type in (
|
||||
CustomFieldType.TEXT,
|
||||
CustomFieldType.NUMBER,
|
||||
CustomFieldType.SINGLE,
|
||||
CustomFieldType.DATETIME,
|
||||
]:
|
||||
):
|
||||
self.string_value = value
|
||||
self.save()
|
||||
elif self.field.type == CustomFieldType.MULTIPLE:
|
||||
@@ -280,16 +280,16 @@ class SiteCustomField(models.Model):
|
||||
return self.multiple_value
|
||||
elif self.field.type == CustomFieldType.CHECKBOX:
|
||||
return self.bool_value
|
||||
else:
|
||||
return self.string_value
|
||||
|
||||
return self.string_value
|
||||
|
||||
def save_to_field(self, value):
|
||||
if self.field.type in [
|
||||
if self.field.type in (
|
||||
CustomFieldType.TEXT,
|
||||
CustomFieldType.NUMBER,
|
||||
CustomFieldType.SINGLE,
|
||||
CustomFieldType.DATETIME,
|
||||
]:
|
||||
):
|
||||
self.string_value = value
|
||||
self.save()
|
||||
elif self.field.type == CustomFieldType.MULTIPLE:
|
||||
|
||||
@@ -12,7 +12,7 @@ class ClientsPerms(permissions.BasePermission):
|
||||
)
|
||||
else:
|
||||
return _has_perm(r, "can_list_clients")
|
||||
elif r.method == "PUT" or r.method == "DELETE":
|
||||
elif r.method in ("PUT", "DELETE"):
|
||||
return _has_perm(r, "can_manage_clients") and _has_perm_on_client(
|
||||
r.user, view.kwargs["pk"]
|
||||
)
|
||||
@@ -29,7 +29,7 @@ class SitesPerms(permissions.BasePermission):
|
||||
)
|
||||
else:
|
||||
return _has_perm(r, "can_list_sites")
|
||||
elif r.method == "PUT" or r.method == "DELETE":
|
||||
elif r.method in ("PUT", "DELETE"):
|
||||
return _has_perm(r, "can_manage_sites") and _has_perm_on_site(
|
||||
r.user, view.kwargs["pk"]
|
||||
)
|
||||
@@ -41,5 +41,5 @@ class DeploymentPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
if r.method == "GET":
|
||||
return _has_perm(r, "can_list_deployments")
|
||||
else:
|
||||
return _has_perm(r, "can_manage_deployments")
|
||||
|
||||
return _has_perm(r, "can_manage_deployments")
|
||||
|
||||
@@ -23,7 +23,7 @@ class TestClientViews(TacticalTestCase):
|
||||
def test_get_clients(self):
|
||||
# setup data
|
||||
baker.make("clients.Client", _quantity=5)
|
||||
clients = Client.objects.all()
|
||||
clients = Client.objects.all() # noqa
|
||||
|
||||
url = f"{base_url}/"
|
||||
r = self.client.get(url, format="json")
|
||||
@@ -520,7 +520,6 @@ class TestClientPermissions(TacticalTestCase):
|
||||
@patch("clients.models.Client.save")
|
||||
@patch("clients.models.Client.delete")
|
||||
def test_add_clients_permissions(self, save, delete):
|
||||
|
||||
data = {"client": {"name": "Client Name"}, "site": {"name": "Site Name"}}
|
||||
|
||||
url = f"{base_url}/"
|
||||
@@ -710,8 +709,8 @@ class TestClientPermissions(TacticalTestCase):
|
||||
|
||||
site = baker.make("clients.Site")
|
||||
other_site = baker.make("clients.Site")
|
||||
deployments = baker.make("clients.Deployment", site=site, _quantity=5)
|
||||
other_deployments = baker.make(
|
||||
deployments = baker.make("clients.Deployment", site=site, _quantity=5) # noqa
|
||||
other_deployments = baker.make( # noqa
|
||||
"clients.Deployment", site=other_site, _quantity=7
|
||||
)
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import datetime as dt
|
||||
import re
|
||||
import uuid
|
||||
from contextlib import suppress
|
||||
|
||||
from django.db.models import Count, Exists, OuterRef, Prefetch, prefetch_related_objects
|
||||
from django.shortcuts import get_object_or_404
|
||||
@@ -95,7 +96,6 @@ class GetAddClients(APIView):
|
||||
# save custom fields
|
||||
if "custom_fields" in request.data.keys():
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
custom_field = field
|
||||
custom_field["client"] = client.id
|
||||
|
||||
@@ -147,7 +147,6 @@ class GetUpdateDeleteClient(APIView):
|
||||
# update custom fields
|
||||
if "custom_fields" in request.data.keys():
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
custom_field = field
|
||||
custom_field["client"] = pk
|
||||
|
||||
@@ -194,7 +193,6 @@ class GetAddSites(APIView):
|
||||
return Response(SiteSerializer(sites, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
|
||||
if not _has_perm_on_client(request.user, request.data["site"]["client"]):
|
||||
raise PermissionDenied()
|
||||
|
||||
@@ -204,9 +202,7 @@ class GetAddSites(APIView):
|
||||
|
||||
# save custom fields
|
||||
if "custom_fields" in request.data.keys():
|
||||
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
custom_field = field
|
||||
custom_field["site"] = site.id
|
||||
|
||||
@@ -245,9 +241,7 @@ class GetUpdateDeleteSite(APIView):
|
||||
|
||||
# update custom field
|
||||
if "custom_fields" in request.data.keys():
|
||||
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
custom_field = field
|
||||
custom_field["site"] = pk
|
||||
|
||||
@@ -338,17 +332,14 @@ class AgentDeployment(APIView):
|
||||
if not _has_perm_on_site(request.user, d.site.pk):
|
||||
raise PermissionDenied()
|
||||
|
||||
try:
|
||||
with suppress(Exception):
|
||||
d.auth_token.delete()
|
||||
except:
|
||||
pass
|
||||
|
||||
d.delete()
|
||||
return Response("The deployment was deleted")
|
||||
|
||||
|
||||
class GenerateAgent(APIView):
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
def get(self, request, uid):
|
||||
|
||||
@@ -50,7 +50,8 @@ localectl set-locale LANG=en_US.UTF-8
|
||||
|
||||
RemoveOldAgent() {
|
||||
if [ -f "${agentSysD}" ]; then
|
||||
systemctl disable --now ${agentSvcName}
|
||||
systemctl disable ${agentSvcName}
|
||||
systemctl stop ${agentSvcName}
|
||||
rm -f ${agentSysD}
|
||||
systemctl daemon-reload
|
||||
fi
|
||||
@@ -67,8 +68,11 @@ RemoveOldAgent() {
|
||||
InstallMesh() {
|
||||
if [ -f /etc/os-release ]; then
|
||||
distroID=$(. /etc/os-release; echo $ID)
|
||||
distroIDLIKE=$(. /etc/os-release; echo $ID_LIKE)
|
||||
if [[ " ${deb[*]} " =~ " ${distroID} " ]]; then
|
||||
set_locale_deb
|
||||
elif [[ " ${deb[*]} " =~ " ${distroIDLIKE} " ]]; then
|
||||
set_locale_deb
|
||||
elif [[ " ${rhe[*]} " =~ " ${distroID} " ]]; then
|
||||
set_locale_rhel
|
||||
else
|
||||
@@ -78,7 +82,7 @@ InstallMesh() {
|
||||
|
||||
meshTmpDir=$(mktemp -d -t "mesh-XXXXXXXXX")
|
||||
if [ $? -ne 0 ]; then
|
||||
meshTmpDir='meshtemp'
|
||||
meshTmpDir='/root/meshtemp'
|
||||
mkdir -p ${meshTmpDir}
|
||||
fi
|
||||
meshTmpBin="${meshTmpDir}/meshagent"
|
||||
@@ -97,7 +101,8 @@ RemoveMesh() {
|
||||
fi
|
||||
|
||||
if [ -f "${meshSysD}" ]; then
|
||||
systemctl disable --now ${meshSvcName} > /dev/null 2>&1
|
||||
systemctl stop ${meshSvcName} > /dev/null 2>&1
|
||||
systemctl disable ${meshSvcName} > /dev/null 2>&1
|
||||
rm -f ${meshSysD}
|
||||
fi
|
||||
|
||||
@@ -119,6 +124,10 @@ RemoveOldAgent
|
||||
|
||||
echo "Downloading tactical agent..."
|
||||
wget -q -O ${agentBin} "${agentDL}"
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "ERROR: Unable to download tactical agent"
|
||||
exit 1
|
||||
fi
|
||||
chmod +x ${agentBin}
|
||||
|
||||
MESH_NODE_ID=""
|
||||
@@ -178,4 +187,5 @@ EOF
|
||||
echo "${tacticalsvc}" | tee ${agentSysD} > /dev/null
|
||||
|
||||
systemctl daemon-reload
|
||||
systemctl enable --now ${agentSvcName}
|
||||
systemctl enable ${agentSvcName}
|
||||
systemctl start ${agentSvcName}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import asyncio
|
||||
from contextlib import suppress
|
||||
|
||||
from channels.db import database_sync_to_async
|
||||
from channels.generic.websocket import AsyncJsonWebsocketConsumer
|
||||
@@ -12,7 +13,6 @@ from tacticalrmm.constants import AgentMonType
|
||||
|
||||
class DashInfo(AsyncJsonWebsocketConsumer):
|
||||
async def connect(self):
|
||||
|
||||
self.user = self.scope["user"]
|
||||
|
||||
if isinstance(self.user, AnonymousUser):
|
||||
@@ -23,16 +23,13 @@ class DashInfo(AsyncJsonWebsocketConsumer):
|
||||
self.dash_info = asyncio.create_task(self.send_dash_info())
|
||||
|
||||
async def disconnect(self, close_code):
|
||||
|
||||
try:
|
||||
with suppress(Exception):
|
||||
self.dash_info.cancel()
|
||||
except:
|
||||
pass
|
||||
|
||||
self.connected = False
|
||||
await self.close()
|
||||
|
||||
async def receive(self, json_data=None):
|
||||
async def receive_json(self, payload, **kwargs):
|
||||
pass
|
||||
|
||||
@database_sync_to_async
|
||||
|
||||
@@ -13,7 +13,7 @@ $apilink = $downloadlink.split('/')
|
||||
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
|
||||
|
||||
$serviceName = 'tacticalagent'
|
||||
$serviceName = 'tacticalrmm'
|
||||
If (Get-Service $serviceName -ErrorAction SilentlyContinue) {
|
||||
write-host ('Tactical RMM Is Already Installed')
|
||||
} Else {
|
||||
|
||||
api/tacticalrmm/core/mac_uninstall.sh (new executable file, 16 lines)
@@ -0,0 +1,16 @@
#!/bin/bash

if [ -f /usr/local/mesh_services/meshagent/meshagent ]; then
/usr/local/mesh_services/meshagent/meshagent -fulluninstall
fi

if [ -f /opt/tacticalmesh/meshagent ]; then
/opt/tacticalmesh/meshagent -fulluninstall
fi

launchctl bootout system /Library/LaunchDaemons/tacticalagent.plist
rm -rf /usr/local/mesh_services
rm -rf /opt/tacticalmesh
rm -f /etc/tacticalagent
rm -rf /opt/tacticalagent
rm -f /Library/LaunchDaemons/tacticalagent.plist
@@ -50,7 +50,7 @@ class Command(BaseCommand):
|
||||
mesh_id = asyncio.run(get_mesh_device_id(uri, core.mesh_device_group))
|
||||
except IndexError:
|
||||
self._error(
|
||||
"Error: you are using a custom mesh device group and need to add its name in Global Settings > MeshCentral"
|
||||
"Error: you are using a custom mesh device group name. The name in TRMMs Global Settings > MeshCentral must match a MeshCentral group exactly."
|
||||
)
|
||||
return
|
||||
except Exception as e:
|
||||
|
||||
@@ -0,0 +1,22 @@
from django.core.cache import cache
from django.core.management.base import BaseCommand

from tacticalrmm.constants import (
AGENT_OUTAGES_LOCK,
ORPHANED_WIN_TASK_LOCK,
RESOLVE_ALERTS_LOCK,
SYNC_SCHED_TASK_LOCK,
)


class Command(BaseCommand):
help = "Clear redis celery locks. Should only be ran while celery/beat is stopped."

def handle(self, *args, **kwargs):
for key in (
AGENT_OUTAGES_LOCK,
ORPHANED_WIN_TASK_LOCK,
RESOLVE_ALERTS_LOCK,
SYNC_SCHED_TASK_LOCK,
):
cache.delete(key)
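Usage note: the locks are ordinary cache keys, so this command simply deletes them; run it only while celery and celery-beat are stopped, otherwise a worker may still legitimately hold a lock. The command's module name is not visible in this diff, so the invocation below is hypothetical.

# Hypothetical name; substitute the actual management command file name from the repo.
# python manage.py clear_redis_celery_locks
from django.core.cache import cache
from tacticalrmm.constants import RESOLVE_ALERTS_LOCK
cache.delete(RESOLVE_ALERTS_LOCK)  # what the loop above does, shown for a single key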
@@ -11,7 +11,6 @@ class Command(BaseCommand):
|
||||
help = "Generate conf for nats-api"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
|
||||
self.stdout.write("Creating configuration for nats-api...")
|
||||
db = settings.DATABASES["default"]
|
||||
if hasattr(settings, "DB_SSL"):
|
||||
|
||||
@@ -0,0 +1,59 @@
import configparser
import os
from pathlib import Path

from django.conf import settings
from django.core.management.base import BaseCommand


class Command(BaseCommand):
help = "Generate conf for uwsgi"

def handle(self, *args, **kwargs):
self.stdout.write("Creating uwsgi conf...")

config = configparser.ConfigParser()

if getattr(settings, "DOCKER_BUILD", False):
home = str(Path(os.getenv("VIRTUAL_ENV")))  # type: ignore
socket = "0.0.0.0:8080"
else:
home = str(settings.BASE_DIR.parents[0] / "env")
socket = str(settings.BASE_DIR / "tacticalrmm.sock")

config["uwsgi"] = {
"chdir": str(settings.BASE_DIR),
"module": "tacticalrmm.wsgi",
"home": home,
"master": str(getattr(settings, "UWSGI_MASTER", True)).lower(),
"enable-threads": str(
getattr(settings, "UWSGI_ENABLE_THREADS", True)
).lower(),
"socket": socket,
"harakiri": str(getattr(settings, "UWSGI_HARAKIRI", 300)),
"chmod-socket": str(getattr(settings, "UWSGI_CHMOD_SOCKET", 660)),
"buffer-size": str(getattr(settings, "UWSGI_BUFFER_SIZE", 65535)),
"vacuum": str(getattr(settings, "UWSGI_VACUUM", True)).lower(),
"die-on-term": str(getattr(settings, "UWSGI_DIE_ON_TERM", True)).lower(),
"max-requests": str(getattr(settings, "UWSGI_MAX_REQUESTS", 500)),
"disable-logging": str(
getattr(settings, "UWSGI_DISABLE_LOGGING", True)
).lower(),
"cheaper-algo": "busyness",
"cheaper": str(getattr(settings, "UWSGI_CHEAPER", 4)),
"cheaper-initial": str(getattr(settings, "UWSGI_CHEAPER_INITIAL", 4)),
"workers": str(getattr(settings, "UWSGI_MAX_WORKERS", 40)),
"cheaper-step": str(getattr(settings, "UWSGI_CHEAPER_STEP", 2)),
"cheaper-overload": str(getattr(settings, "UWSGI_CHEAPER_OVERLOAD", 3)),
"cheaper-busyness-min": str(getattr(settings, "UWSGI_BUSYNESS_MIN", 5)),
"cheaper-busyness-max": str(getattr(settings, "UWSGI_BUSYNESS_MAX", 10)),
}

if getattr(settings, "UWSGI_DEBUG", False):
config["uwsgi"]["stats"] = "/tmp/stats.socket"
config["uwsgi"]["cheaper-busyness-verbose"] = str(True).lower()

with open(settings.BASE_DIR / "app.ini", "w") as fp:
config.write(fp)

self.stdout.write("Created uwsgi conf")
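As a quick sanity check after running this command, the generated file can be read back with configparser; a minimal sketch, assuming none of the UWSGI_* settings are overridden (so the defaults shown above apply):

import configparser
from django.conf import settings

cfg = configparser.ConfigParser()
cfg.read(settings.BASE_DIR / "app.ini")
print(cfg["uwsgi"]["module"])    # tacticalrmm.wsgi
print(cfg["uwsgi"]["harakiri"])  # "300" unless UWSGI_HARAKIRI is set
print(cfg["uwsgi"]["workers"])   # "40" unless UWSGI_MAX_WORKERS is set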
@@ -1,6 +1,10 @@
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from tacticalrmm.helpers import get_webdomain
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Get config vars to be used in shell scripts"
|
||||
@@ -22,6 +26,8 @@ class Command(BaseCommand):
|
||||
self.stdout.write(settings.NATS_SERVER_VER)
|
||||
case "frontend":
|
||||
self.stdout.write(settings.CORS_ORIGIN_WHITELIST[0])
|
||||
case "webdomain":
|
||||
self.stdout.write(get_webdomain())
|
||||
case "djangoadmin":
|
||||
url = f"https://{settings.ALLOWED_HOSTS[0]}/{settings.ADMIN_URL}"
|
||||
self.stdout.write(url)
|
||||
@@ -39,7 +45,7 @@ class Command(BaseCommand):
|
||||
self.stdout.write(settings.DATABASES["default"]["HOST"])
|
||||
case "dbport":
|
||||
self.stdout.write(settings.DATABASES["default"]["PORT"])
|
||||
case "meshsite" | "meshuser" | "meshtoken":
|
||||
case "meshsite" | "meshuser" | "meshtoken" | "meshdomain":
|
||||
from core.models import CoreSettings
|
||||
|
||||
core: "CoreSettings" = CoreSettings.objects.first()
|
||||
@@ -47,6 +53,8 @@ class Command(BaseCommand):
|
||||
obj = core.mesh_site
|
||||
elif kwargs["name"] == "meshuser":
|
||||
obj = core.mesh_username
|
||||
elif kwargs["name"] == "meshdomain":
|
||||
obj = urlparse(core.mesh_site).netloc
|
||||
else:
|
||||
obj = core.mesh_token
|
||||
|
||||
|
||||
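The new meshdomain case works because urlparse keeps only the network location of the configured mesh site URL; a one-line example (mesh.example.com is a placeholder, not a value from this diff):

from urllib.parse import urlparse
urlparse("https://mesh.example.com/login").netloc  # -> "mesh.example.com"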
@@ -11,9 +11,7 @@ class Command(BaseCommand):
|
||||
help = "Sets up initial mesh central configuration"
|
||||
|
||||
async def websocket_call(self, uri):
|
||||
|
||||
async with websockets.connect(uri) as websocket:
|
||||
|
||||
# Get Invitation Link
|
||||
await websocket.send(
|
||||
json.dumps(
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
from contextlib import suppress
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
@@ -8,9 +10,7 @@ class Command(BaseCommand):
|
||||
help = "Populates the global site settings on first install"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
try:
|
||||
# can only be 1 instance of this. Prevents error when rebuilding docker container
|
||||
with suppress(ValidationError):
|
||||
CoreSettings().save()
|
||||
self.stdout.write("Core db populated")
|
||||
except ValidationError:
|
||||
# can only be 1 instance of this. Prevents error when rebuilding docker container
|
||||
pass
|
||||
|
||||
@@ -12,16 +12,13 @@ class Command(BaseCommand):
|
||||
help = "Sets up initial mesh central configuration"
|
||||
|
||||
async def websocket_call(self, uri):
|
||||
|
||||
async with websockets.connect(uri) as websocket:
|
||||
|
||||
# Get Device groups to see if it exists
|
||||
await websocket.send(json.dumps({"action": "meshes"}))
|
||||
|
||||
async for message in websocket:
|
||||
response = json.loads(message)
|
||||
if response["action"] == "meshes":
|
||||
|
||||
# If no meshes are present
|
||||
if not response["meshes"]:
|
||||
await websocket.send(
|
||||
|
||||
@@ -7,6 +7,6 @@ class Command(BaseCommand):
|
||||
help = "Collection of tasks to run after updating the rmm, before migrations"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
self.stdout.write(self.style.WARNING("Clearning the cache"))
|
||||
self.stdout.write(self.style.WARNING("Cleaning the cache"))
|
||||
clear_entire_cache()
|
||||
self.stdout.write(self.style.SUCCESS("Cache was cleared!"))
|
||||
|
||||
@@ -6,7 +6,9 @@ from autotasks.tasks import remove_orphaned_win_tasks
|
||||
from core.tasks import (
|
||||
cache_db_fields_task,
|
||||
core_maintenance_tasks,
|
||||
handle_resolved_stuff,
|
||||
resolve_alerts_task,
|
||||
resolve_pending_actions,
|
||||
sync_scheduled_tasks,
|
||||
)
|
||||
from winupdate.tasks import auto_approve_updates_task, check_agent_update_schedule_task
|
||||
|
||||
@@ -20,7 +22,9 @@ class Command(BaseCommand):
|
||||
unsnooze_alerts.delay()
|
||||
cache_db_fields_task.delay()
|
||||
core_maintenance_tasks.delay()
|
||||
handle_resolved_stuff.delay()
|
||||
resolve_pending_actions.delay()
|
||||
resolve_alerts_task.delay()
|
||||
sync_scheduled_tasks.delay()
|
||||
remove_orphaned_win_tasks.delay()
|
||||
auto_approve_updates_task.delay()
|
||||
check_agent_update_schedule_task.delay()
|
||||
|
||||
@@ -0,0 +1,630 @@
|
||||
# Generated by Django 4.1.7 on 2023-02-28 22:14
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("core", "0035_alter_coresettings_default_time_zone"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="coresettings",
|
||||
name="default_time_zone",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("Africa/Abidjan", "Africa/Abidjan"),
|
||||
("Africa/Accra", "Africa/Accra"),
|
||||
("Africa/Addis_Ababa", "Africa/Addis_Ababa"),
|
||||
("Africa/Algiers", "Africa/Algiers"),
|
||||
("Africa/Asmara", "Africa/Asmara"),
|
||||
("Africa/Asmera", "Africa/Asmera"),
|
||||
("Africa/Bamako", "Africa/Bamako"),
|
||||
("Africa/Bangui", "Africa/Bangui"),
|
||||
("Africa/Banjul", "Africa/Banjul"),
|
||||
("Africa/Bissau", "Africa/Bissau"),
|
||||
("Africa/Blantyre", "Africa/Blantyre"),
|
||||
("Africa/Brazzaville", "Africa/Brazzaville"),
|
||||
("Africa/Bujumbura", "Africa/Bujumbura"),
|
||||
("Africa/Cairo", "Africa/Cairo"),
|
||||
("Africa/Casablanca", "Africa/Casablanca"),
|
||||
("Africa/Ceuta", "Africa/Ceuta"),
|
||||
("Africa/Conakry", "Africa/Conakry"),
|
||||
("Africa/Dakar", "Africa/Dakar"),
|
||||
("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"),
|
||||
("Africa/Djibouti", "Africa/Djibouti"),
|
||||
("Africa/Douala", "Africa/Douala"),
|
||||
("Africa/El_Aaiun", "Africa/El_Aaiun"),
|
||||
("Africa/Freetown", "Africa/Freetown"),
|
||||
("Africa/Gaborone", "Africa/Gaborone"),
|
||||
("Africa/Harare", "Africa/Harare"),
|
||||
("Africa/Johannesburg", "Africa/Johannesburg"),
|
||||
("Africa/Juba", "Africa/Juba"),
|
||||
("Africa/Kampala", "Africa/Kampala"),
|
||||
("Africa/Khartoum", "Africa/Khartoum"),
|
||||
("Africa/Kigali", "Africa/Kigali"),
|
||||
("Africa/Kinshasa", "Africa/Kinshasa"),
|
||||
("Africa/Lagos", "Africa/Lagos"),
|
||||
("Africa/Libreville", "Africa/Libreville"),
|
||||
("Africa/Lome", "Africa/Lome"),
|
||||
("Africa/Luanda", "Africa/Luanda"),
|
||||
("Africa/Lubumbashi", "Africa/Lubumbashi"),
|
||||
("Africa/Lusaka", "Africa/Lusaka"),
|
||||
("Africa/Malabo", "Africa/Malabo"),
|
||||
("Africa/Maputo", "Africa/Maputo"),
|
||||
("Africa/Maseru", "Africa/Maseru"),
|
||||
("Africa/Mbabane", "Africa/Mbabane"),
|
||||
("Africa/Mogadishu", "Africa/Mogadishu"),
|
||||
("Africa/Monrovia", "Africa/Monrovia"),
|
||||
("Africa/Nairobi", "Africa/Nairobi"),
|
||||
("Africa/Ndjamena", "Africa/Ndjamena"),
|
||||
("Africa/Niamey", "Africa/Niamey"),
|
||||
("Africa/Nouakchott", "Africa/Nouakchott"),
|
||||
("Africa/Ouagadougou", "Africa/Ouagadougou"),
|
||||
("Africa/Porto-Novo", "Africa/Porto-Novo"),
|
||||
("Africa/Sao_Tome", "Africa/Sao_Tome"),
|
||||
("Africa/Timbuktu", "Africa/Timbuktu"),
|
||||
("Africa/Tripoli", "Africa/Tripoli"),
|
||||
("Africa/Tunis", "Africa/Tunis"),
|
||||
("Africa/Windhoek", "Africa/Windhoek"),
|
||||
("America/Adak", "America/Adak"),
|
||||
("America/Anchorage", "America/Anchorage"),
|
||||
("America/Anguilla", "America/Anguilla"),
|
||||
("America/Antigua", "America/Antigua"),
|
||||
("America/Araguaina", "America/Araguaina"),
|
||||
(
|
||||
"America/Argentina/Buenos_Aires",
|
||||
"America/Argentina/Buenos_Aires",
|
||||
),
|
||||
("America/Argentina/Catamarca", "America/Argentina/Catamarca"),
|
||||
(
|
||||
"America/Argentina/ComodRivadavia",
|
||||
"America/Argentina/ComodRivadavia",
|
||||
),
|
||||
("America/Argentina/Cordoba", "America/Argentina/Cordoba"),
|
||||
("America/Argentina/Jujuy", "America/Argentina/Jujuy"),
|
||||
("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"),
|
||||
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
|
||||
(
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
),
|
||||
("America/Argentina/Salta", "America/Argentina/Salta"),
|
||||
("America/Argentina/San_Juan", "America/Argentina/San_Juan"),
|
||||
("America/Argentina/San_Luis", "America/Argentina/San_Luis"),
|
||||
("America/Argentina/Tucuman", "America/Argentina/Tucuman"),
|
||||
("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"),
|
||||
("America/Aruba", "America/Aruba"),
|
||||
("America/Asuncion", "America/Asuncion"),
|
||||
("America/Atikokan", "America/Atikokan"),
|
||||
("America/Atka", "America/Atka"),
|
||||
("America/Bahia", "America/Bahia"),
|
||||
("America/Bahia_Banderas", "America/Bahia_Banderas"),
|
||||
("America/Barbados", "America/Barbados"),
|
||||
("America/Belem", "America/Belem"),
|
||||
("America/Belize", "America/Belize"),
|
||||
("America/Blanc-Sablon", "America/Blanc-Sablon"),
|
||||
("America/Boa_Vista", "America/Boa_Vista"),
|
||||
("America/Bogota", "America/Bogota"),
|
||||
("America/Boise", "America/Boise"),
|
||||
("America/Buenos_Aires", "America/Buenos_Aires"),
|
||||
("America/Cambridge_Bay", "America/Cambridge_Bay"),
|
||||
("America/Campo_Grande", "America/Campo_Grande"),
|
||||
("America/Cancun", "America/Cancun"),
|
||||
("America/Caracas", "America/Caracas"),
|
||||
("America/Catamarca", "America/Catamarca"),
|
||||
("America/Cayenne", "America/Cayenne"),
|
||||
("America/Cayman", "America/Cayman"),
|
||||
("America/Chicago", "America/Chicago"),
|
||||
("America/Chihuahua", "America/Chihuahua"),
|
||||
("America/Ciudad_Juarez", "America/Ciudad_Juarez"),
|
||||
("America/Coral_Harbour", "America/Coral_Harbour"),
|
||||
("America/Cordoba", "America/Cordoba"),
|
||||
("America/Costa_Rica", "America/Costa_Rica"),
|
||||
("America/Creston", "America/Creston"),
|
||||
("America/Cuiaba", "America/Cuiaba"),
|
||||
("America/Curacao", "America/Curacao"),
|
||||
("America/Danmarkshavn", "America/Danmarkshavn"),
|
||||
("America/Dawson", "America/Dawson"),
|
||||
("America/Dawson_Creek", "America/Dawson_Creek"),
|
||||
("America/Denver", "America/Denver"),
|
||||
("America/Detroit", "America/Detroit"),
|
||||
("America/Dominica", "America/Dominica"),
|
||||
("America/Edmonton", "America/Edmonton"),
|
||||
("America/Eirunepe", "America/Eirunepe"),
|
||||
("America/El_Salvador", "America/El_Salvador"),
|
||||
("America/Ensenada", "America/Ensenada"),
|
||||
("America/Fort_Nelson", "America/Fort_Nelson"),
|
||||
("America/Fort_Wayne", "America/Fort_Wayne"),
|
||||
("America/Fortaleza", "America/Fortaleza"),
|
||||
("America/Glace_Bay", "America/Glace_Bay"),
|
||||
("America/Godthab", "America/Godthab"),
|
||||
("America/Goose_Bay", "America/Goose_Bay"),
|
||||
("America/Grand_Turk", "America/Grand_Turk"),
|
||||
("America/Grenada", "America/Grenada"),
|
||||
("America/Guadeloupe", "America/Guadeloupe"),
|
||||
("America/Guatemala", "America/Guatemala"),
|
||||
("America/Guayaquil", "America/Guayaquil"),
|
||||
("America/Guyana", "America/Guyana"),
|
||||
("America/Halifax", "America/Halifax"),
|
||||
("America/Havana", "America/Havana"),
|
||||
("America/Hermosillo", "America/Hermosillo"),
|
||||
("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"),
|
||||
("America/Indiana/Knox", "America/Indiana/Knox"),
|
||||
("America/Indiana/Marengo", "America/Indiana/Marengo"),
|
||||
("America/Indiana/Petersburg", "America/Indiana/Petersburg"),
|
||||
("America/Indiana/Tell_City", "America/Indiana/Tell_City"),
|
||||
("America/Indiana/Vevay", "America/Indiana/Vevay"),
|
||||
("America/Indiana/Vincennes", "America/Indiana/Vincennes"),
|
||||
("America/Indiana/Winamac", "America/Indiana/Winamac"),
|
||||
("America/Indianapolis", "America/Indianapolis"),
|
||||
("America/Inuvik", "America/Inuvik"),
|
||||
("America/Iqaluit", "America/Iqaluit"),
|
||||
("America/Jamaica", "America/Jamaica"),
|
||||
("America/Jujuy", "America/Jujuy"),
|
||||
("America/Juneau", "America/Juneau"),
|
||||
("America/Kentucky/Louisville", "America/Kentucky/Louisville"),
|
||||
("America/Kentucky/Monticello", "America/Kentucky/Monticello"),
|
||||
("America/Knox_IN", "America/Knox_IN"),
|
||||
("America/Kralendijk", "America/Kralendijk"),
|
||||
("America/La_Paz", "America/La_Paz"),
|
||||
("America/Lima", "America/Lima"),
|
||||
("America/Los_Angeles", "America/Los_Angeles"),
|
||||
("America/Louisville", "America/Louisville"),
|
||||
("America/Lower_Princes", "America/Lower_Princes"),
|
||||
("America/Maceio", "America/Maceio"),
|
||||
("America/Managua", "America/Managua"),
|
||||
("America/Manaus", "America/Manaus"),
|
||||
("America/Marigot", "America/Marigot"),
|
||||
("America/Martinique", "America/Martinique"),
|
||||
("America/Matamoros", "America/Matamoros"),
|
||||
("America/Mazatlan", "America/Mazatlan"),
|
||||
("America/Mendoza", "America/Mendoza"),
|
||||
("America/Menominee", "America/Menominee"),
|
||||
("America/Merida", "America/Merida"),
|
||||
("America/Metlakatla", "America/Metlakatla"),
|
||||
("America/Mexico_City", "America/Mexico_City"),
|
||||
("America/Miquelon", "America/Miquelon"),
|
||||
("America/Moncton", "America/Moncton"),
|
||||
("America/Monterrey", "America/Monterrey"),
|
||||
("America/Montevideo", "America/Montevideo"),
|
||||
("America/Montreal", "America/Montreal"),
|
||||
("America/Montserrat", "America/Montserrat"),
|
||||
("America/Nassau", "America/Nassau"),
|
||||
("America/New_York", "America/New_York"),
|
||||
("America/Nipigon", "America/Nipigon"),
|
||||
("America/Nome", "America/Nome"),
|
||||
("America/Noronha", "America/Noronha"),
|
||||
("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"),
|
||||
("America/North_Dakota/Center", "America/North_Dakota/Center"),
|
||||
(
|
||||
"America/North_Dakota/New_Salem",
|
||||
"America/North_Dakota/New_Salem",
|
||||
),
|
||||
("America/Nuuk", "America/Nuuk"),
|
||||
("America/Ojinaga", "America/Ojinaga"),
|
||||
("America/Panama", "America/Panama"),
|
||||
("America/Pangnirtung", "America/Pangnirtung"),
|
||||
("America/Paramaribo", "America/Paramaribo"),
|
||||
("America/Phoenix", "America/Phoenix"),
|
||||
("America/Port-au-Prince", "America/Port-au-Prince"),
|
||||
("America/Port_of_Spain", "America/Port_of_Spain"),
|
||||
("America/Porto_Acre", "America/Porto_Acre"),
|
||||
("America/Porto_Velho", "America/Porto_Velho"),
|
||||
("America/Puerto_Rico", "America/Puerto_Rico"),
|
||||
("America/Punta_Arenas", "America/Punta_Arenas"),
|
||||
("America/Rainy_River", "America/Rainy_River"),
|
||||
("America/Rankin_Inlet", "America/Rankin_Inlet"),
|
||||
("America/Recife", "America/Recife"),
|
||||
("America/Regina", "America/Regina"),
|
||||
("America/Resolute", "America/Resolute"),
|
||||
("America/Rio_Branco", "America/Rio_Branco"),
|
||||
("America/Rosario", "America/Rosario"),
|
||||
("America/Santa_Isabel", "America/Santa_Isabel"),
|
||||
("America/Santarem", "America/Santarem"),
|
||||
("America/Santiago", "America/Santiago"),
|
||||
("America/Santo_Domingo", "America/Santo_Domingo"),
|
||||
("America/Sao_Paulo", "America/Sao_Paulo"),
|
||||
("America/Scoresbysund", "America/Scoresbysund"),
|
||||
("America/Shiprock", "America/Shiprock"),
|
||||
("America/Sitka", "America/Sitka"),
|
||||
("America/St_Barthelemy", "America/St_Barthelemy"),
|
||||
("America/St_Johns", "America/St_Johns"),
|
||||
("America/St_Kitts", "America/St_Kitts"),
|
||||
("America/St_Lucia", "America/St_Lucia"),
|
||||
("America/St_Thomas", "America/St_Thomas"),
|
||||
("America/St_Vincent", "America/St_Vincent"),
|
||||
("America/Swift_Current", "America/Swift_Current"),
|
||||
("America/Tegucigalpa", "America/Tegucigalpa"),
|
||||
("America/Thule", "America/Thule"),
|
||||
("America/Thunder_Bay", "America/Thunder_Bay"),
|
||||
("America/Tijuana", "America/Tijuana"),
|
||||
("America/Toronto", "America/Toronto"),
|
||||
("America/Tortola", "America/Tortola"),
|
||||
("America/Vancouver", "America/Vancouver"),
|
||||
("America/Virgin", "America/Virgin"),
|
||||
("America/Whitehorse", "America/Whitehorse"),
|
||||
("America/Winnipeg", "America/Winnipeg"),
|
||||
("America/Yakutat", "America/Yakutat"),
|
||||
("America/Yellowknife", "America/Yellowknife"),
|
||||
("Antarctica/Casey", "Antarctica/Casey"),
|
||||
("Antarctica/Davis", "Antarctica/Davis"),
|
||||
("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"),
|
||||
("Antarctica/Macquarie", "Antarctica/Macquarie"),
|
||||
("Antarctica/Mawson", "Antarctica/Mawson"),
|
||||
("Antarctica/McMurdo", "Antarctica/McMurdo"),
|
||||
("Antarctica/Palmer", "Antarctica/Palmer"),
|
||||
("Antarctica/Rothera", "Antarctica/Rothera"),
|
||||
("Antarctica/South_Pole", "Antarctica/South_Pole"),
|
||||
("Antarctica/Syowa", "Antarctica/Syowa"),
|
||||
("Antarctica/Troll", "Antarctica/Troll"),
|
||||
("Antarctica/Vostok", "Antarctica/Vostok"),
|
||||
("Arctic/Longyearbyen", "Arctic/Longyearbyen"),
|
||||
("Asia/Aden", "Asia/Aden"),
|
||||
("Asia/Almaty", "Asia/Almaty"),
|
||||
("Asia/Amman", "Asia/Amman"),
|
||||
("Asia/Anadyr", "Asia/Anadyr"),
|
||||
("Asia/Aqtau", "Asia/Aqtau"),
|
||||
("Asia/Aqtobe", "Asia/Aqtobe"),
|
||||
("Asia/Ashgabat", "Asia/Ashgabat"),
|
||||
("Asia/Ashkhabad", "Asia/Ashkhabad"),
|
||||
("Asia/Atyrau", "Asia/Atyrau"),
|
||||
("Asia/Baghdad", "Asia/Baghdad"),
|
||||
("Asia/Bahrain", "Asia/Bahrain"),
|
||||
("Asia/Baku", "Asia/Baku"),
|
||||
("Asia/Bangkok", "Asia/Bangkok"),
|
||||
("Asia/Barnaul", "Asia/Barnaul"),
|
||||
("Asia/Beirut", "Asia/Beirut"),
|
||||
("Asia/Bishkek", "Asia/Bishkek"),
|
||||
("Asia/Brunei", "Asia/Brunei"),
|
||||
("Asia/Calcutta", "Asia/Calcutta"),
|
||||
("Asia/Chita", "Asia/Chita"),
|
||||
("Asia/Choibalsan", "Asia/Choibalsan"),
|
||||
("Asia/Chongqing", "Asia/Chongqing"),
|
||||
("Asia/Chungking", "Asia/Chungking"),
|
||||
("Asia/Colombo", "Asia/Colombo"),
|
||||
("Asia/Dacca", "Asia/Dacca"),
|
||||
("Asia/Damascus", "Asia/Damascus"),
|
||||
("Asia/Dhaka", "Asia/Dhaka"),
|
||||
("Asia/Dili", "Asia/Dili"),
|
||||
("Asia/Dubai", "Asia/Dubai"),
|
||||
("Asia/Dushanbe", "Asia/Dushanbe"),
|
||||
("Asia/Famagusta", "Asia/Famagusta"),
|
||||
("Asia/Gaza", "Asia/Gaza"),
|
||||
("Asia/Harbin", "Asia/Harbin"),
|
||||
("Asia/Hebron", "Asia/Hebron"),
|
||||
("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"),
|
||||
("Asia/Hong_Kong", "Asia/Hong_Kong"),
|
||||
("Asia/Hovd", "Asia/Hovd"),
|
||||
("Asia/Irkutsk", "Asia/Irkutsk"),
|
||||
("Asia/Istanbul", "Asia/Istanbul"),
|
||||
("Asia/Jakarta", "Asia/Jakarta"),
|
||||
("Asia/Jayapura", "Asia/Jayapura"),
|
||||
("Asia/Jerusalem", "Asia/Jerusalem"),
|
||||
("Asia/Kabul", "Asia/Kabul"),
|
||||
("Asia/Kamchatka", "Asia/Kamchatka"),
|
||||
("Asia/Karachi", "Asia/Karachi"),
|
||||
("Asia/Kashgar", "Asia/Kashgar"),
|
||||
("Asia/Kathmandu", "Asia/Kathmandu"),
|
||||
("Asia/Katmandu", "Asia/Katmandu"),
|
||||
("Asia/Khandyga", "Asia/Khandyga"),
|
||||
("Asia/Kolkata", "Asia/Kolkata"),
|
||||
("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"),
|
||||
("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"),
|
||||
("Asia/Kuching", "Asia/Kuching"),
|
||||
("Asia/Kuwait", "Asia/Kuwait"),
|
||||
("Asia/Macao", "Asia/Macao"),
|
||||
("Asia/Macau", "Asia/Macau"),
|
||||
("Asia/Magadan", "Asia/Magadan"),
|
||||
("Asia/Makassar", "Asia/Makassar"),
|
||||
("Asia/Manila", "Asia/Manila"),
|
||||
("Asia/Muscat", "Asia/Muscat"),
|
||||
("Asia/Nicosia", "Asia/Nicosia"),
|
||||
("Asia/Novokuznetsk", "Asia/Novokuznetsk"),
|
||||
("Asia/Novosibirsk", "Asia/Novosibirsk"),
|
||||
("Asia/Omsk", "Asia/Omsk"),
|
||||
("Asia/Oral", "Asia/Oral"),
|
||||
("Asia/Phnom_Penh", "Asia/Phnom_Penh"),
|
||||
("Asia/Pontianak", "Asia/Pontianak"),
|
||||
("Asia/Pyongyang", "Asia/Pyongyang"),
|
||||
("Asia/Qatar", "Asia/Qatar"),
|
||||
("Asia/Qostanay", "Asia/Qostanay"),
|
||||
("Asia/Qyzylorda", "Asia/Qyzylorda"),
|
||||
("Asia/Rangoon", "Asia/Rangoon"),
|
||||
("Asia/Riyadh", "Asia/Riyadh"),
|
||||
("Asia/Saigon", "Asia/Saigon"),
|
||||
("Asia/Sakhalin", "Asia/Sakhalin"),
|
||||
("Asia/Samarkand", "Asia/Samarkand"),
|
||||
("Asia/Seoul", "Asia/Seoul"),
|
||||
("Asia/Shanghai", "Asia/Shanghai"),
|
||||
("Asia/Singapore", "Asia/Singapore"),
|
||||
("Asia/Srednekolymsk", "Asia/Srednekolymsk"),
|
||||
("Asia/Taipei", "Asia/Taipei"),
|
||||
("Asia/Tashkent", "Asia/Tashkent"),
|
||||
("Asia/Tbilisi", "Asia/Tbilisi"),
|
||||
("Asia/Tehran", "Asia/Tehran"),
|
||||
("Asia/Tel_Aviv", "Asia/Tel_Aviv"),
|
||||
("Asia/Thimbu", "Asia/Thimbu"),
|
||||
("Asia/Thimphu", "Asia/Thimphu"),
|
||||
("Asia/Tokyo", "Asia/Tokyo"),
|
||||
("Asia/Tomsk", "Asia/Tomsk"),
|
||||
("Asia/Ujung_Pandang", "Asia/Ujung_Pandang"),
|
||||
("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"),
|
||||
("Asia/Ulan_Bator", "Asia/Ulan_Bator"),
|
||||
("Asia/Urumqi", "Asia/Urumqi"),
|
||||
("Asia/Ust-Nera", "Asia/Ust-Nera"),
|
||||
("Asia/Vientiane", "Asia/Vientiane"),
|
||||
("Asia/Vladivostok", "Asia/Vladivostok"),
|
||||
("Asia/Yakutsk", "Asia/Yakutsk"),
|
||||
("Asia/Yangon", "Asia/Yangon"),
|
||||
("Asia/Yekaterinburg", "Asia/Yekaterinburg"),
|
||||
("Asia/Yerevan", "Asia/Yerevan"),
|
||||
("Atlantic/Azores", "Atlantic/Azores"),
|
||||
("Atlantic/Bermuda", "Atlantic/Bermuda"),
|
||||
("Atlantic/Canary", "Atlantic/Canary"),
|
||||
("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"),
|
||||
("Atlantic/Faeroe", "Atlantic/Faeroe"),
|
||||
("Atlantic/Faroe", "Atlantic/Faroe"),
|
||||
("Atlantic/Jan_Mayen", "Atlantic/Jan_Mayen"),
|
||||
("Atlantic/Madeira", "Atlantic/Madeira"),
|
||||
("Atlantic/Reykjavik", "Atlantic/Reykjavik"),
|
||||
("Atlantic/South_Georgia", "Atlantic/South_Georgia"),
|
||||
("Atlantic/St_Helena", "Atlantic/St_Helena"),
|
||||
("Atlantic/Stanley", "Atlantic/Stanley"),
|
||||
("Australia/ACT", "Australia/ACT"),
|
||||
("Australia/Adelaide", "Australia/Adelaide"),
|
||||
("Australia/Brisbane", "Australia/Brisbane"),
|
||||
("Australia/Broken_Hill", "Australia/Broken_Hill"),
|
||||
("Australia/Canberra", "Australia/Canberra"),
|
||||
("Australia/Currie", "Australia/Currie"),
|
||||
("Australia/Darwin", "Australia/Darwin"),
|
||||
("Australia/Eucla", "Australia/Eucla"),
|
||||
("Australia/Hobart", "Australia/Hobart"),
|
||||
("Australia/LHI", "Australia/LHI"),
|
||||
("Australia/Lindeman", "Australia/Lindeman"),
|
||||
("Australia/Lord_Howe", "Australia/Lord_Howe"),
|
||||
("Australia/Melbourne", "Australia/Melbourne"),
|
||||
("Australia/NSW", "Australia/NSW"),
|
||||
("Australia/North", "Australia/North"),
|
||||
("Australia/Perth", "Australia/Perth"),
|
||||
("Australia/Queensland", "Australia/Queensland"),
|
||||
("Australia/South", "Australia/South"),
|
||||
("Australia/Sydney", "Australia/Sydney"),
|
||||
("Australia/Tasmania", "Australia/Tasmania"),
|
||||
("Australia/Victoria", "Australia/Victoria"),
|
||||
("Australia/West", "Australia/West"),
|
||||
("Australia/Yancowinna", "Australia/Yancowinna"),
|
||||
("Brazil/Acre", "Brazil/Acre"),
|
||||
("Brazil/DeNoronha", "Brazil/DeNoronha"),
|
||||
("Brazil/East", "Brazil/East"),
|
||||
("Brazil/West", "Brazil/West"),
|
||||
("CET", "CET"),
|
||||
("CST6CDT", "CST6CDT"),
|
||||
("Canada/Atlantic", "Canada/Atlantic"),
|
||||
("Canada/Central", "Canada/Central"),
|
||||
("Canada/Eastern", "Canada/Eastern"),
|
||||
("Canada/Mountain", "Canada/Mountain"),
|
||||
("Canada/Newfoundland", "Canada/Newfoundland"),
|
||||
("Canada/Pacific", "Canada/Pacific"),
|
||||
("Canada/Saskatchewan", "Canada/Saskatchewan"),
|
||||
("Canada/Yukon", "Canada/Yukon"),
|
||||
("Chile/Continental", "Chile/Continental"),
|
||||
("Chile/EasterIsland", "Chile/EasterIsland"),
|
||||
("Cuba", "Cuba"),
|
||||
("EET", "EET"),
|
||||
("EST", "EST"),
|
||||
("EST5EDT", "EST5EDT"),
|
||||
("Egypt", "Egypt"),
|
||||
("Eire", "Eire"),
|
||||
("Etc/GMT", "Etc/GMT"),
|
||||
("Etc/GMT+0", "Etc/GMT+0"),
|
||||
("Etc/GMT+1", "Etc/GMT+1"),
|
||||
("Etc/GMT+10", "Etc/GMT+10"),
|
||||
("Etc/GMT+11", "Etc/GMT+11"),
|
||||
("Etc/GMT+12", "Etc/GMT+12"),
|
||||
("Etc/GMT+2", "Etc/GMT+2"),
|
||||
("Etc/GMT+3", "Etc/GMT+3"),
|
||||
("Etc/GMT+4", "Etc/GMT+4"),
|
||||
("Etc/GMT+5", "Etc/GMT+5"),
|
||||
("Etc/GMT+6", "Etc/GMT+6"),
|
||||
("Etc/GMT+7", "Etc/GMT+7"),
|
||||
("Etc/GMT+8", "Etc/GMT+8"),
|
||||
("Etc/GMT+9", "Etc/GMT+9"),
|
||||
("Etc/GMT-0", "Etc/GMT-0"),
|
||||
("Etc/GMT-1", "Etc/GMT-1"),
|
||||
("Etc/GMT-10", "Etc/GMT-10"),
|
||||
("Etc/GMT-11", "Etc/GMT-11"),
|
||||
("Etc/GMT-12", "Etc/GMT-12"),
|
||||
("Etc/GMT-13", "Etc/GMT-13"),
|
||||
("Etc/GMT-14", "Etc/GMT-14"),
|
||||
("Etc/GMT-2", "Etc/GMT-2"),
|
||||
("Etc/GMT-3", "Etc/GMT-3"),
|
||||
("Etc/GMT-4", "Etc/GMT-4"),
|
||||
("Etc/GMT-5", "Etc/GMT-5"),
|
||||
("Etc/GMT-6", "Etc/GMT-6"),
|
||||
("Etc/GMT-7", "Etc/GMT-7"),
|
||||
("Etc/GMT-8", "Etc/GMT-8"),
|
||||
("Etc/GMT-9", "Etc/GMT-9"),
|
||||
("Etc/GMT0", "Etc/GMT0"),
|
||||
("Etc/Greenwich", "Etc/Greenwich"),
|
||||
("Etc/UCT", "Etc/UCT"),
|
||||
("Etc/UTC", "Etc/UTC"),
|
||||
("Etc/Universal", "Etc/Universal"),
|
||||
("Etc/Zulu", "Etc/Zulu"),
|
||||
("Europe/Amsterdam", "Europe/Amsterdam"),
|
||||
("Europe/Andorra", "Europe/Andorra"),
|
||||
("Europe/Astrakhan", "Europe/Astrakhan"),
|
||||
("Europe/Athens", "Europe/Athens"),
|
||||
("Europe/Belfast", "Europe/Belfast"),
|
||||
("Europe/Belgrade", "Europe/Belgrade"),
|
||||
("Europe/Berlin", "Europe/Berlin"),
|
||||
("Europe/Bratislava", "Europe/Bratislava"),
|
||||
("Europe/Brussels", "Europe/Brussels"),
|
||||
("Europe/Bucharest", "Europe/Bucharest"),
|
||||
("Europe/Budapest", "Europe/Budapest"),
|
||||
("Europe/Busingen", "Europe/Busingen"),
|
||||
("Europe/Chisinau", "Europe/Chisinau"),
|
||||
("Europe/Copenhagen", "Europe/Copenhagen"),
|
||||
("Europe/Dublin", "Europe/Dublin"),
|
||||
("Europe/Gibraltar", "Europe/Gibraltar"),
|
||||
("Europe/Guernsey", "Europe/Guernsey"),
|
||||
("Europe/Helsinki", "Europe/Helsinki"),
|
||||
("Europe/Isle_of_Man", "Europe/Isle_of_Man"),
|
||||
("Europe/Istanbul", "Europe/Istanbul"),
|
||||
("Europe/Jersey", "Europe/Jersey"),
|
||||
("Europe/Kaliningrad", "Europe/Kaliningrad"),
|
||||
("Europe/Kiev", "Europe/Kiev"),
|
||||
("Europe/Kirov", "Europe/Kirov"),
|
||||
("Europe/Kyiv", "Europe/Kyiv"),
|
||||
("Europe/Lisbon", "Europe/Lisbon"),
|
||||
("Europe/Ljubljana", "Europe/Ljubljana"),
|
||||
("Europe/London", "Europe/London"),
|
||||
("Europe/Luxembourg", "Europe/Luxembourg"),
|
||||
("Europe/Madrid", "Europe/Madrid"),
|
||||
("Europe/Malta", "Europe/Malta"),
|
||||
("Europe/Mariehamn", "Europe/Mariehamn"),
|
||||
("Europe/Minsk", "Europe/Minsk"),
|
||||
("Europe/Monaco", "Europe/Monaco"),
|
||||
("Europe/Moscow", "Europe/Moscow"),
|
||||
("Europe/Nicosia", "Europe/Nicosia"),
|
||||
("Europe/Oslo", "Europe/Oslo"),
|
||||
("Europe/Paris", "Europe/Paris"),
|
||||
("Europe/Podgorica", "Europe/Podgorica"),
|
||||
("Europe/Prague", "Europe/Prague"),
|
||||
("Europe/Riga", "Europe/Riga"),
|
||||
("Europe/Rome", "Europe/Rome"),
|
||||
("Europe/Samara", "Europe/Samara"),
|
||||
("Europe/San_Marino", "Europe/San_Marino"),
|
||||
("Europe/Sarajevo", "Europe/Sarajevo"),
|
||||
("Europe/Saratov", "Europe/Saratov"),
|
||||
("Europe/Simferopol", "Europe/Simferopol"),
|
||||
("Europe/Skopje", "Europe/Skopje"),
|
||||
("Europe/Sofia", "Europe/Sofia"),
|
||||
("Europe/Stockholm", "Europe/Stockholm"),
|
||||
("Europe/Tallinn", "Europe/Tallinn"),
|
||||
("Europe/Tirane", "Europe/Tirane"),
|
||||
("Europe/Tiraspol", "Europe/Tiraspol"),
|
||||
("Europe/Ulyanovsk", "Europe/Ulyanovsk"),
|
||||
("Europe/Uzhgorod", "Europe/Uzhgorod"),
|
||||
("Europe/Vaduz", "Europe/Vaduz"),
|
||||
("Europe/Vatican", "Europe/Vatican"),
|
||||
("Europe/Vienna", "Europe/Vienna"),
|
||||
("Europe/Vilnius", "Europe/Vilnius"),
|
||||
("Europe/Volgograd", "Europe/Volgograd"),
|
||||
("Europe/Warsaw", "Europe/Warsaw"),
|
||||
("Europe/Zagreb", "Europe/Zagreb"),
|
||||
("Europe/Zaporozhye", "Europe/Zaporozhye"),
|
||||
("Europe/Zurich", "Europe/Zurich"),
|
||||
("GB", "GB"),
|
||||
("GB-Eire", "GB-Eire"),
|
||||
("GMT", "GMT"),
|
||||
("GMT+0", "GMT+0"),
|
||||
("GMT-0", "GMT-0"),
|
||||
("GMT0", "GMT0"),
|
||||
("Greenwich", "Greenwich"),
|
||||
("HST", "HST"),
|
||||
("Hongkong", "Hongkong"),
|
||||
("Iceland", "Iceland"),
|
||||
("Indian/Antananarivo", "Indian/Antananarivo"),
|
||||
("Indian/Chagos", "Indian/Chagos"),
|
||||
("Indian/Christmas", "Indian/Christmas"),
|
||||
("Indian/Cocos", "Indian/Cocos"),
|
||||
("Indian/Comoro", "Indian/Comoro"),
|
||||
("Indian/Kerguelen", "Indian/Kerguelen"),
|
||||
("Indian/Mahe", "Indian/Mahe"),
|
||||
("Indian/Maldives", "Indian/Maldives"),
|
||||
("Indian/Mauritius", "Indian/Mauritius"),
|
||||
("Indian/Mayotte", "Indian/Mayotte"),
|
||||
("Indian/Reunion", "Indian/Reunion"),
|
||||
("Iran", "Iran"),
|
||||
("Israel", "Israel"),
|
||||
("Jamaica", "Jamaica"),
|
||||
("Japan", "Japan"),
|
||||
("Kwajalein", "Kwajalein"),
|
||||
("Libya", "Libya"),
|
||||
("MET", "MET"),
|
||||
("MST", "MST"),
|
||||
("MST7MDT", "MST7MDT"),
|
||||
("Mexico/BajaNorte", "Mexico/BajaNorte"),
|
||||
("Mexico/BajaSur", "Mexico/BajaSur"),
|
||||
("Mexico/General", "Mexico/General"),
|
||||
("NZ", "NZ"),
|
||||
("NZ-CHAT", "NZ-CHAT"),
|
||||
("Navajo", "Navajo"),
|
||||
("PRC", "PRC"),
|
||||
("PST8PDT", "PST8PDT"),
|
||||
("Pacific/Apia", "Pacific/Apia"),
|
||||
("Pacific/Auckland", "Pacific/Auckland"),
|
||||
("Pacific/Bougainville", "Pacific/Bougainville"),
|
||||
("Pacific/Chatham", "Pacific/Chatham"),
|
||||
("Pacific/Chuuk", "Pacific/Chuuk"),
|
||||
("Pacific/Easter", "Pacific/Easter"),
|
||||
("Pacific/Efate", "Pacific/Efate"),
|
||||
("Pacific/Enderbury", "Pacific/Enderbury"),
|
||||
("Pacific/Fakaofo", "Pacific/Fakaofo"),
|
||||
("Pacific/Fiji", "Pacific/Fiji"),
|
||||
("Pacific/Funafuti", "Pacific/Funafuti"),
|
||||
("Pacific/Galapagos", "Pacific/Galapagos"),
|
||||
("Pacific/Gambier", "Pacific/Gambier"),
|
||||
("Pacific/Guadalcanal", "Pacific/Guadalcanal"),
|
||||
("Pacific/Guam", "Pacific/Guam"),
|
||||
("Pacific/Honolulu", "Pacific/Honolulu"),
|
||||
("Pacific/Johnston", "Pacific/Johnston"),
|
||||
("Pacific/Kanton", "Pacific/Kanton"),
|
||||
("Pacific/Kiritimati", "Pacific/Kiritimati"),
|
||||
("Pacific/Kosrae", "Pacific/Kosrae"),
|
||||
("Pacific/Kwajalein", "Pacific/Kwajalein"),
|
||||
("Pacific/Majuro", "Pacific/Majuro"),
|
||||
("Pacific/Marquesas", "Pacific/Marquesas"),
|
||||
("Pacific/Midway", "Pacific/Midway"),
|
||||
("Pacific/Nauru", "Pacific/Nauru"),
|
||||
("Pacific/Niue", "Pacific/Niue"),
|
||||
("Pacific/Norfolk", "Pacific/Norfolk"),
|
||||
("Pacific/Noumea", "Pacific/Noumea"),
|
||||
("Pacific/Pago_Pago", "Pacific/Pago_Pago"),
|
||||
("Pacific/Palau", "Pacific/Palau"),
|
||||
("Pacific/Pitcairn", "Pacific/Pitcairn"),
|
||||
("Pacific/Pohnpei", "Pacific/Pohnpei"),
|
||||
("Pacific/Ponape", "Pacific/Ponape"),
|
||||
("Pacific/Port_Moresby", "Pacific/Port_Moresby"),
|
||||
("Pacific/Rarotonga", "Pacific/Rarotonga"),
|
||||
("Pacific/Saipan", "Pacific/Saipan"),
|
||||
("Pacific/Samoa", "Pacific/Samoa"),
|
||||
("Pacific/Tahiti", "Pacific/Tahiti"),
|
||||
("Pacific/Tarawa", "Pacific/Tarawa"),
|
||||
("Pacific/Tongatapu", "Pacific/Tongatapu"),
|
||||
("Pacific/Truk", "Pacific/Truk"),
|
||||
("Pacific/Wake", "Pacific/Wake"),
|
||||
("Pacific/Wallis", "Pacific/Wallis"),
|
||||
("Pacific/Yap", "Pacific/Yap"),
|
||||
("Poland", "Poland"),
|
||||
("Portugal", "Portugal"),
|
||||
("ROC", "ROC"),
|
||||
("ROK", "ROK"),
|
||||
("Singapore", "Singapore"),
|
||||
("Turkey", "Turkey"),
|
||||
("UCT", "UCT"),
|
||||
("US/Alaska", "US/Alaska"),
|
||||
("US/Aleutian", "US/Aleutian"),
|
||||
("US/Arizona", "US/Arizona"),
|
||||
("US/Central", "US/Central"),
|
||||
("US/East-Indiana", "US/East-Indiana"),
|
||||
("US/Eastern", "US/Eastern"),
|
||||
("US/Hawaii", "US/Hawaii"),
|
||||
("US/Indiana-Starke", "US/Indiana-Starke"),
|
||||
("US/Michigan", "US/Michigan"),
|
||||
("US/Mountain", "US/Mountain"),
|
||||
("US/Pacific", "US/Pacific"),
|
||||
("US/Samoa", "US/Samoa"),
|
||||
("UTC", "UTC"),
|
||||
("Universal", "Universal"),
|
||||
("W-SU", "W-SU"),
|
||||
("WET", "WET"),
|
||||
("Zulu", "Zulu"),
|
||||
],
|
||||
default="America/Los_Angeles",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,4 +1,5 @@
|
||||
import smtplib
|
||||
from contextlib import suppress
|
||||
from email.message import EmailMessage
|
||||
from typing import TYPE_CHECKING, List, Optional, cast
|
||||
|
||||
@@ -108,12 +109,10 @@ class CoreSettings(BaseAuditModel):
|
||||
|
||||
# for install script
|
||||
if not self.pk:
|
||||
try:
|
||||
with suppress(Exception):
|
||||
self.mesh_site = settings.MESH_SITE
|
||||
self.mesh_username = settings.MESH_USERNAME.lower()
|
||||
self.mesh_token = settings.MESH_TOKEN_KEY
|
||||
except:
|
||||
pass
|
||||
|
||||
old_settings = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
@@ -127,10 +126,10 @@ class CoreSettings(BaseAuditModel):
|
||||
cache_agents_alert_template.delay()
|
||||
|
||||
if old_settings.workstation_policy != self.workstation_policy:
|
||||
cache.delete_many_pattern(f"site_workstation_*")
|
||||
cache.delete_many_pattern("site_workstation_*")
|
||||
|
||||
if old_settings.server_policy != self.server_policy:
|
||||
cache.delete_many_pattern(f"site_server_*")
|
||||
cache.delete_many_pattern("site_server_*")
|
||||
|
||||
if (
|
||||
old_settings.server_policy != self.server_policy
|
||||
@@ -273,7 +272,6 @@ class CoreSettings(BaseAuditModel):
|
||||
|
||||
|
||||
class CustomField(BaseAuditModel):
|
||||
|
||||
order = models.PositiveIntegerField(default=0)
|
||||
model = models.CharField(max_length=25, choices=CustomFieldModel.choices)
|
||||
type = models.CharField(
|
||||
@@ -315,8 +313,8 @@ class CustomField(BaseAuditModel):
|
||||
return self.default_values_multiple
|
||||
elif self.type == CustomFieldType.CHECKBOX:
|
||||
return self.default_value_bool
|
||||
else:
|
||||
return self.default_value_string
|
||||
|
||||
return self.default_value_string
|
||||
|
||||
def get_or_create_field_value(self, instance):
|
||||
from agents.models import Agent, AgentCustomField
|
||||
@@ -365,6 +363,23 @@ class CodeSignToken(models.Model):
|
||||
|
||||
return r.status_code == 200
|
||||
|
||||
@property
|
||||
def is_expired(self) -> bool:
|
||||
if not self.token:
|
||||
return False
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
settings.CHECK_TOKEN_URL,
|
||||
json={"token": self.token, "api": settings.ALLOWED_HOSTS[0]},
|
||||
headers={"Content-type": "application/json"},
|
||||
timeout=15,
|
||||
)
|
||||
except:
|
||||
return False
|
||||
|
||||
return r.status_code == 401
|
||||
|
||||
def __str__(self):
|
||||
return "Code signing token"
|
||||
|
||||
|
||||
@@ -7,8 +7,8 @@ class CoreSettingsPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
if r.method == "GET":
|
||||
return _has_perm(r, "can_view_core_settings")
|
||||
else:
|
||||
return _has_perm(r, "can_edit_core_settings")
|
||||
|
||||
return _has_perm(r, "can_edit_core_settings")
|
||||
|
||||
|
||||
class URLActionPerms(permissions.BasePermission):
|
||||
@@ -30,5 +30,5 @@ class CustomFieldPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
if r.method == "GET":
|
||||
return _has_perm(r, "can_view_customfields")
|
||||
else:
|
||||
return _has_perm(r, "can_manage_customfields")
|
||||
|
||||
return _has_perm(r, "can_manage_customfields")
|
||||
|
||||
@@ -5,7 +5,6 @@ from .models import CodeSignToken, CoreSettings, CustomField, GlobalKVStore, URL
|
||||
|
||||
|
||||
class CoreSettingsSerializer(serializers.ModelSerializer):
|
||||
|
||||
all_timezones = serializers.SerializerMethodField("all_time_zones")
|
||||
|
||||
def all_time_zones(self, obj):
|
||||
|
||||
@@ -1,14 +1,16 @@
from typing import TYPE_CHECKING, Any, Dict
import time
from typing import TYPE_CHECKING, Any

from django.conf import settings
from django.db.models import Prefetch
from django.utils import timezone as djangotime
from packaging import version as pyver

from agents.models import Agent
from agents.tasks import clear_faults_task, prune_agent_history
from alerts.models import Alert
from alerts.tasks import prune_resolved_alerts
from autotasks.models import TaskResult
from autotasks.models import AutomatedTask, TaskResult
from checks.models import Check, CheckResult
from checks.tasks import prune_check_history
from clients.models import Client, Site
@@ -20,6 +22,8 @@ from tacticalrmm.constants import (
    AGENT_DEFER,
    AGENT_STATUS_ONLINE,
    AGENT_STATUS_OVERDUE,
    RESOLVE_ALERTS_LOCK,
    SYNC_SCHED_TASK_LOCK,
    AlertSeverity,
    AlertType,
    PAAction,
@@ -27,6 +31,8 @@ from tacticalrmm.constants import (
    TaskStatus,
    TaskSyncStatus,
)
from tacticalrmm.helpers import rand_range
from tacticalrmm.utils import DjangoConnectionThreadPoolExecutor, redis_lock

if TYPE_CHECKING:
    from django.db.models import QuerySet
@@ -34,6 +40,10 @@ if TYPE_CHECKING:

@app.task
def core_maintenance_tasks() -> None:
    AutomatedTask.objects.filter(
        remove_if_not_scheduled=True, expire_date__lt=djangotime.now()
    ).delete()

    core = get_core_settings()

    # remove old CheckHistory data
@@ -62,16 +72,15 @@ def core_maintenance_tasks() -> None:


@app.task
def handle_resolved_stuff() -> None:

def resolve_pending_actions() -> None:
    # change agent update pending status to completed if agent has just updated
    actions = (
    actions: "QuerySet[PendingAction]" = (
        PendingAction.objects.select_related("agent")
        .defer("agent__services", "agent__wmi_detail")
        .filter(action_type=PAAction.AGENT_UPDATE, status=PAStatus.PENDING)
    )

    to_update = [
    to_update: list[int] = [
        action.id
        for action in actions
        if pyver.parse(action.agent.version) == pyver.parse(settings.LATEST_AGENT_VER)
@@ -80,7 +89,9 @@ def handle_resolved_stuff() -> None:

    PendingAction.objects.filter(pk__in=to_update).update(status=PAStatus.COMPLETED)

    agent_queryset = (

def _get_agent_qs() -> "QuerySet[Agent]":
    qs: "QuerySet[Agent]" = (
        Agent.objects.defer(*AGENT_DEFER)
        .select_related(
            "site__server_policy",
@@ -88,6 +99,7 @@ def handle_resolved_stuff() -> None:
            "site__client__server_policy",
            "site__client__workstation_policy",
            "policy",
            "policy__alert_template",
            "alert_template",
        )
        .prefetch_related(
@@ -106,34 +118,83 @@ def handle_resolved_stuff() -> None:
            "autotasks",
        )
    )

    for agent in agent_queryset:
        if (
            pyver.parse(agent.version) >= pyver.parse("1.6.0")
            and agent.status == AGENT_STATUS_ONLINE
        ):
            # sync scheduled tasks
            for task in agent.get_tasks_with_policies():
                if (
                    not task.task_result
                    or task.task_result.sync_status == TaskSyncStatus.INITIAL
                ):
                    task.create_task_on_agent(agent=agent if task.policy else None)
                elif task.task_result.sync_status == TaskSyncStatus.PENDING_DELETION:
                    task.delete_task_on_agent(agent=agent if task.policy else None)
                elif task.task_result.sync_status == TaskSyncStatus.NOT_SYNCED:
                    task.modify_task_on_agent(agent=agent if task.policy else None)
                elif task.task_result.sync_status == TaskSyncStatus.SYNCED:
                    continue

            # handles any alerting actions
            if Alert.objects.filter(
                alert_type=AlertType.AVAILABILITY, agent=agent, resolved=False
            ).exists():
                Alert.handle_alert_resolve(agent)
    return qs


def _get_failing_data(agents: "QuerySet[Any]") -> Dict[str, bool]:
@app.task(bind=True)
def resolve_alerts_task(self) -> str:
    with redis_lock(RESOLVE_ALERTS_LOCK, self.app.oid) as acquired:
        if not acquired:
            return f"{self.app.oid} still running"

        # TODO rework this to not use an agent queryset, use Alerts
        for agent in _get_agent_qs():
            if (
                pyver.parse(agent.version) >= pyver.parse("1.6.0")
                and agent.status == AGENT_STATUS_ONLINE
            ):
                # handles any alerting actions
                if Alert.objects.filter(
                    alert_type=AlertType.AVAILABILITY, agent=agent, resolved=False
                ).exists():
                    Alert.handle_alert_resolve(agent)

        return "completed"

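Note: resolve_alerts_task above and sync_scheduled_tasks below both guard their bodies with redis_lock(LOCK_NAME, self.app.oid) so that only one Celery worker runs the task at a time; the other worker sees acquired == False and returns early. The redis_lock helper itself lives in tacticalrmm.utils and is not part of this diff. The following is only a minimal sketch of what a non-blocking lock context manager of this shape could look like, assuming a Redis-backed Django cache; it is not the project's actual implementation.

from contextlib import contextmanager

from django.core.cache import cache  # assumed Redis-backed cache


@contextmanager
def redis_lock(lock_name: str, lock_value: str, timeout: int = 60 * 60):
    # cache.add is atomic (SETNX semantics): it only sets the key if it is absent,
    # so it doubles as a non-blocking "try to take the lock"
    acquired = cache.add(lock_name, lock_value, timeout)
    try:
        yield acquired
    finally:
        if acquired:
            cache.delete(lock_name)
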
@app.task(bind=True)
def sync_scheduled_tasks(self) -> str:
    with redis_lock(SYNC_SCHED_TASK_LOCK, self.app.oid) as acquired:
        if not acquired:
            return f"{self.app.oid} still running"

        task_actions = []  # list of tuples
        for agent in _get_agent_qs():
            if (
                pyver.parse(agent.version) >= pyver.parse("1.6.0")
                and agent.status == AGENT_STATUS_ONLINE
            ):
                # create a list of tasks to be synced so we can run them in parallel later with thread pool executor
                for task in agent.get_tasks_with_policies():
                    agent_obj = agent if task.policy else None

                    # policy tasks will be an empty dict on initial
                    if (not task.task_result) or (
                        isinstance(task.task_result, TaskResult)
                        and task.task_result.sync_status == TaskSyncStatus.INITIAL
                    ):
                        task_actions.append(("create", task.id, agent_obj))
                    elif (
                        isinstance(task.task_result, TaskResult)
                        and task.task_result.sync_status
                        == TaskSyncStatus.PENDING_DELETION
                    ):
                        task_actions.append(("delete", task.id, agent_obj))
                    elif (
                        isinstance(task.task_result, TaskResult)
                        and task.task_result.sync_status == TaskSyncStatus.NOT_SYNCED
                    ):
                        task_actions.append(("modify", task.id, agent_obj))

        def _handle_task(actions: tuple[str, int, Any]) -> None:
            time.sleep(rand_range(50, 600))
            task: "AutomatedTask" = AutomatedTask.objects.get(id=actions[1])
            if actions[0] == "create":
                task.create_task_on_agent(agent=actions[2])
            elif actions[0] == "modify":
                task.modify_task_on_agent(agent=actions[2])
            elif actions[0] == "delete":
                task.delete_task_on_agent(agent=actions[2])

        # TODO this is a janky hack
        # Rework this with asyncio. Need to rewrite all sync db operations with django's new async api
        with DjangoConnectionThreadPoolExecutor(max_workers=50) as executor:
            executor.map(_handle_task, task_actions)

        return "completed"

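Note: DjangoConnectionThreadPoolExecutor is imported from tacticalrmm.utils and its implementation is not shown in this diff. Presumably it exists because each worker thread opens its own Django database connection, which has to be closed when the thread's work is done. A minimal sketch under that assumption (illustrative only, not the project's code):

from concurrent.futures import ThreadPoolExecutor

from django.db import connections


class DjangoConnectionThreadPoolExecutor(ThreadPoolExecutor):
    # Close this thread's Django DB connections after each submitted call.
    # Executor.map() is built on submit(), so map() picks this up as well.
    def submit(self, fn, *args, **kwargs):
        def wrapper(*a, **kw):
            try:
                return fn(*a, **kw)
            finally:
                connections.close_all()

        return super().submit(wrapper, *args, **kwargs)
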
def _get_failing_data(agents: "QuerySet[Agent]") -> dict[str, bool]:
    data = {"error": False, "warning": False}
    for agent in agents:
        if agent.maintenance_mode:
@@ -149,7 +210,6 @@ def _get_failing_data(agents: "QuerySet[Any]") -> Dict[str, bool]:
            break

        if agent.checks["has_failing_checks"]:

            if agent.checks["warning"]:
                data["warning"] = True

@@ -181,32 +241,7 @@ def _get_failing_data(agents: "QuerySet[Any]") -> Dict[str, bool]:

@app.task
def cache_db_fields_task() -> None:
    qs = (
        Agent.objects.defer(*AGENT_DEFER)
        .select_related(
            "site__server_policy",
            "site__workstation_policy",
            "site__client__server_policy",
            "site__client__workstation_policy",
            "policy__alert_template",
            "alert_template",
        )
        .prefetch_related(
            Prefetch(
                "agentchecks",
                queryset=Check.objects.select_related("script"),
            ),
            Prefetch(
                "checkresults",
                queryset=CheckResult.objects.select_related("assigned_check"),
            ),
            Prefetch(
                "taskresults",
                queryset=TaskResult.objects.select_related("task"),
            ),
            "autotasks",
        )
    )
    qs = _get_agent_qs()
    # update client/site failing check fields and agent counts
    for site in Site.objects.all():
        agents = qs.filter(site=site)

@@ -5,19 +5,26 @@ from channels.db import database_sync_to_async
from channels.testing import WebsocketCommunicator
from django.conf import settings
from django.core.management import call_command
from django.test import override_settings
from model_bakery import baker
from rest_framework.authtoken.models import Token

from agents.models import Agent
from core.utils import get_core_settings
from core.utils import get_core_settings, get_meshagent_url
from logs.models import PendingAction
from tacticalrmm.constants import CONFIG_MGMT_CMDS, CustomFieldModel, PAAction, PAStatus
from tacticalrmm.constants import (
    CONFIG_MGMT_CMDS,
    CustomFieldModel,
    MeshAgentIdent,
    PAAction,
    PAStatus,
)
from tacticalrmm.test import TacticalTestCase

from .consumers import DashInfo
from .models import CustomField, GlobalKVStore, URLAction
from .serializers import CustomFieldSerializer, KeyStoreSerializer, URLActionSerializer
from .tasks import core_maintenance_tasks, handle_resolved_stuff
from .tasks import core_maintenance_tasks, resolve_pending_actions


class TestCodeSign(TacticalTestCase):
@@ -55,7 +62,6 @@ class TestConsumers(TacticalTestCase):

    @database_sync_to_async
    def get_token(self):

        token = Token.objects.create(user=self.john)
        return token.key

@@ -104,7 +110,7 @@ class TestCoreTasks(TacticalTestCase):
        url = "/core/settings/"

        # setup
        policies = baker.make("automation.Policy", _quantity=2)
        baker.make("automation.Policy", _quantity=2)
        # test normal request
        data = {
            "smtp_from_email": "newexample@example.com",
@@ -122,7 +128,7 @@ class TestCoreTasks(TacticalTestCase):
    def test_ui_maintenance_actions(self, remove_orphaned_win_tasks, reload_nats):
        url = "/core/servermaintenance/"

        agents = baker.make_recipe("agents.online_agent", _quantity=3)
        baker.make_recipe("agents.online_agent", _quantity=3)

        # test with empty data
        r = self.client.post(url, {})
@@ -179,9 +185,7 @@ class TestCoreTasks(TacticalTestCase):
        url = "/core/customfields/"

        # setup
        custom_fields = baker.make(
            "core.CustomField", model=CustomFieldModel.AGENT, _quantity=5
        )
        baker.make("core.CustomField", model=CustomFieldModel.AGENT, _quantity=5)
        baker.make("core.CustomField", model="client", _quantity=5)

        # will error if request invalid
@@ -190,7 +194,6 @@ class TestCoreTasks(TacticalTestCase):

        data = {"model": "agent"}
        r = self.client.patch(url, data)
        serializer = CustomFieldSerializer(custom_fields, many=True)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(len(r.data), 5)

@@ -418,7 +421,7 @@ class TestCoreTasks(TacticalTestCase):

        Agent.objects.update(version=settings.LATEST_AGENT_VER)

        handle_resolved_stuff()
        resolve_pending_actions()

        complete = PendingAction.objects.filter(
            action_type=PAAction.AGENT_UPDATE, status=PAStatus.COMPLETED
@@ -444,3 +447,56 @@ class TestCorePermissions(TacticalTestCase):
    def setUp(self):
        self.setup_client()
        self.setup_coresettings()


class TestCoreUtils(TacticalTestCase):
    def setUp(self):
        self.setup_coresettings()

    def test_get_meshagent_url_standard(self):
        r = get_meshagent_url(
            ident=MeshAgentIdent.DARWIN_UNIVERSAL,
            plat="darwin",
            mesh_site="https://mesh.example.com",
            mesh_device_id="abc123",
        )
        self.assertEqual(
            r,
            "https://mesh.example.com/meshagents?id=abc123&installflags=2&meshinstall=10005",
        )

        r = get_meshagent_url(
            ident=MeshAgentIdent.WIN64,
            plat="windows",
            mesh_site="https://mesh.example.com",
            mesh_device_id="abc123",
        )
        self.assertEqual(
            r,
            "https://mesh.example.com/meshagents?id=4&meshid=abc123&installflags=0",
        )

    @override_settings(DOCKER_BUILD=True)
    @override_settings(MESH_WS_URL="ws://tactical-meshcentral:4443")
    def test_get_meshagent_url_docker(self):
        r = get_meshagent_url(
            ident=MeshAgentIdent.DARWIN_UNIVERSAL,
            plat="darwin",
            mesh_site="https://mesh.example.com",
            mesh_device_id="abc123",
        )
        self.assertEqual(
            r,
            "http://tactical-meshcentral:4443/meshagents?id=abc123&installflags=2&meshinstall=10005",
        )

        r = get_meshagent_url(
            ident=MeshAgentIdent.WIN64,
            plat="windows",
            mesh_site="https://mesh.example.com",
            mesh_device_id="abc123",
        )
        self.assertEqual(
            r,
            "http://tactical-meshcentral:4443/meshagents?id=4&meshid=abc123&installflags=0",
        )

@@ -1,6 +1,7 @@
import json
import subprocess
import tempfile
import urllib.parse
from base64 import b64encode
from typing import TYPE_CHECKING, Optional, cast

@@ -11,10 +12,16 @@ from django.core.cache import cache
from django.http import FileResponse
from meshctrl.utils import get_auth_token

from tacticalrmm.constants import CORESETTINGS_CACHE_KEY, ROLE_CACHE_PREFIX
from tacticalrmm.constants import (
    AGENT_TBL_PEND_ACTION_CNT_CACHE_PREFIX,
    CORESETTINGS_CACHE_KEY,
    ROLE_CACHE_PREFIX,
    AgentPlat,
    MeshAgentIdent,
)

if TYPE_CHECKING:
    from core.models import CodeSignToken, CoreSettings
    from core.models import CoreSettings


class CoreSettingsNotFound(Exception):
@@ -23,6 +30,7 @@ class CoreSettingsNotFound(Exception):

def clear_entire_cache() -> None:
    cache.delete_many_pattern(f"{ROLE_CACHE_PREFIX}*")
    cache.delete_many_pattern(f"{AGENT_TBL_PEND_ACTION_CNT_CACHE_PREFIX}*")
    cache.delete(CORESETTINGS_CACHE_KEY)
    cache.delete_many_pattern("site_*")
    cache.delete_many_pattern("agent_*")
@@ -47,6 +55,16 @@ def token_is_valid() -> tuple[str, bool]:
    return "", False


def token_is_expired() -> bool:
    from core.models import CodeSignToken

    t: "CodeSignToken" = CodeSignToken.objects.first()
    if not t or not t.token:
        return False

    return t.is_expired


def get_core_settings() -> "CoreSettings":
    from core.models import CORESETTINGS_CACHE_KEY, CoreSettings

@@ -142,3 +160,27 @@ def sysd_svc_is_running(svc: str) -> bool:
    cmd = ["systemctl", "is-active", "--quiet", svc]
    r = subprocess.run(cmd, capture_output=True)
    return not r.returncode


def get_meshagent_url(
    *, ident: "MeshAgentIdent", plat: str, mesh_site: str, mesh_device_id: str
) -> str:
    if settings.DOCKER_BUILD:
        base = settings.MESH_WS_URL.replace("ws://", "http://")
    else:
        base = mesh_site

    if plat == AgentPlat.WINDOWS:
        params = {
            "id": ident,
            "meshid": mesh_device_id,
            "installflags": 0,
        }
    else:
        params = {
            "id": mesh_device_id,
            "installflags": 2,
            "meshinstall": ident,
        }

    return base + "/meshagents?" + urllib.parse.urlencode(params)

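For reference, per the TestCoreUtils expectations added earlier in this diff (where MeshAgentIdent.WIN64 serializes to 4 and MeshAgentIdent.DARWIN_UNIVERSAL to 10005), the new helper produces MeshCentral download URLs like the following; values are taken from those test cases, not invented here:

url = get_meshagent_url(
    ident=MeshAgentIdent.WIN64,
    plat="windows",
    mesh_site="https://mesh.example.com",
    mesh_device_id="abc123",
)
# -> "https://mesh.example.com/meshagents?id=4&meshid=abc123&installflags=0"

url = get_meshagent_url(
    ident=MeshAgentIdent.DARWIN_UNIVERSAL,
    plat="darwin",
    mesh_site="https://mesh.example.com",
    mesh_device_id="abc123",
)
# -> "https://mesh.example.com/meshagents?id=abc123&installflags=2&meshinstall=10005"
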
@@ -1,4 +1,5 @@
import re
from pathlib import Path

import psutil
import pytz
@@ -6,8 +7,8 @@ from cryptography import x509
from django.conf import settings
from django.http import JsonResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.csrf import csrf_exempt
from django.utils import timezone as djangotime
from django.views.decorators.csrf import csrf_exempt
from rest_framework.decorators import api_view, permission_classes
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import IsAuthenticated
@@ -73,6 +74,7 @@ def clear_cache(request):

@api_view()
def dashboard_info(request):
    from core.utils import token_is_expired
    from tacticalrmm.utils import get_latest_trmm_ver

    return Response(
@@ -93,6 +95,7 @@ def dashboard_info(request):
            "hosted": getattr(settings, "HOSTED", False),
            "date_format": request.user.date_format,
            "default_date_format": get_core_settings().date_format,
            "token_is_expired": token_is_expired(),
        }
    )

@@ -127,9 +130,7 @@ def server_maintenance(request):
        from autotasks.tasks import remove_orphaned_win_tasks

        remove_orphaned_win_tasks.delay()
        return Response(
            "The task has been initiated. Check the Debug Log in the UI for progress."
        )
        return Response("The task has been initiated.")

    if request.data["action"] == "prune_db":
        from logs.models import AuditLog, PendingAction
@@ -175,8 +176,8 @@ class GetAddCustomFields(APIView):
        if "model" in request.data.keys():
            fields = CustomField.objects.filter(model=request.data["model"])
            return Response(CustomFieldSerializer(fields, many=True).data)
        else:
            return notify_error("The request was invalid")

        return notify_error("The request was invalid")

    def post(self, request):
        serializer = CustomFieldSerializer(data=request.data, partial=True)
@@ -231,7 +232,7 @@ class CodeSign(APIView):
        except Exception as e:
            return notify_error(str(e))

        if r.status_code == 400 or r.status_code == 401:
        if r.status_code in (400, 401):
            return notify_error(r.json()["ret"])
        elif r.status_code == 200:
            t = CodeSignToken.objects.first()
@@ -389,7 +390,6 @@ class TwilioSMSTest(APIView):
    permission_classes = [IsAuthenticated, CoreSettingsPerms]

    def post(self, request):

        core = get_core_settings()
        if not core.sms_is_configured:
            return notify_error(
@@ -406,7 +406,6 @@ class TwilioSMSTest(APIView):
@csrf_exempt
@monitoring_view
def status(request):

    from agents.models import Agent
    from clients.models import Client, Site

@@ -414,8 +413,7 @@ def status(request):
    mem_usage: int = round(psutil.virtual_memory().percent)

    cert_file, _ = get_certs()
    with open(cert_file, "rb") as f:
        cert_bytes = f.read()
    cert_bytes = Path(cert_file).read_bytes()

    cert = x509.load_pem_x509_certificate(cert_bytes)
    expires = pytz.utc.localize(cert.not_valid_after)

@@ -5,4 +5,4 @@ class LogsConfig(AppConfig):
    name = "logs"

    def ready(self):
        from . import signals
        from . import signals  # noqa

@@ -41,7 +41,6 @@ class AuditLog(models.Model):
        return f"{self.username} {self.action} {self.object_type}"

    def save(self, *args: Any, **kwargs: Any) -> None:

        if not self.pk and self.message:
            # truncate message field if longer than 255 characters
            self.message = (
@@ -282,7 +281,7 @@ class DebugLog(models.Model):
        agent: "Optional[Agent]" = None,
        log_type: str = DebugLogType.SYSTEM_ISSUES,
    ) -> None:
        if get_debug_level() in [DebugLogLevel.INFO]:
        if get_debug_level() == DebugLogLevel.INFO:
            cls.objects.create(
                log_level=DebugLogLevel.INFO,
                agent=agent,
@@ -297,7 +296,7 @@ class DebugLog(models.Model):
        agent: "Optional[Agent]" = None,
        log_type: str = DebugLogType.SYSTEM_ISSUES,
    ) -> None:
        if get_debug_level() in [DebugLogLevel.INFO, DebugLogLevel.WARN]:
        if get_debug_level() in (DebugLogLevel.INFO, DebugLogLevel.WARN):
            cls.objects.create(
                log_level=DebugLogLevel.INFO,
                agent=agent,
@@ -312,11 +311,11 @@ class DebugLog(models.Model):
        agent: "Optional[Agent]" = None,
        log_type: str = DebugLogType.SYSTEM_ISSUES,
    ) -> None:
        if get_debug_level() in [
        if get_debug_level() in (
            DebugLogLevel.INFO,
            DebugLogLevel.WARN,
            DebugLogLevel.ERROR,
        ]:
        ):
            cls.objects.create(
                log_level=DebugLogLevel.ERROR,
                agent=agent,
@@ -331,12 +330,12 @@ class DebugLog(models.Model):
        agent: "Optional[Agent]" = None,
        log_type: str = DebugLogType.SYSTEM_ISSUES,
    ) -> None:
        if get_debug_level() in [
        if get_debug_level() in (
            DebugLogLevel.INFO,
            DebugLogLevel.WARN,
            DebugLogLevel.ERROR,
            DebugLogLevel.CRITICAL,
        ]:
        ):
            cls.objects.create(
                log_level=DebugLogLevel.CRITICAL,
                agent=agent,
@@ -346,7 +345,6 @@ class DebugLog(models.Model):


class PendingAction(models.Model):

    objects = PermissionQuerySet.as_manager()

    agent = models.ForeignKey(
@@ -376,8 +374,8 @@ class PendingAction(models.Model):
            return "Next update cycle"
        elif self.action_type == PAAction.CHOCO_INSTALL:
            return "ASAP"
        else:
            return "On next checkin"

        return "On next checkin"

    @property
    def description(self) -> Optional[str]:
@@ -390,15 +388,15 @@ class PendingAction(models.Model):
        elif self.action_type == PAAction.CHOCO_INSTALL:
            return f"{self.details['name']} software install"

        elif self.action_type in [
        elif self.action_type in (
            PAAction.RUN_CMD,
            PAAction.RUN_SCRIPT,
            PAAction.RUN_PATCH_SCAN,
            PAAction.RUN_PATCH_INSTALL,
        ]:
        ):
            return f"{self.action_type}"
        else:
            return None

        return None


class BaseAuditModel(models.Model):
@@ -417,10 +415,8 @@ class BaseAuditModel(models.Model):
        pass

    def save(self, old_model: Optional[models.Model] = None, *args, **kwargs) -> None:

        username = get_username()
        if username:

            object_class = type(self)
            object_name = object_class.__name__.lower()
            after_value = object_class.serialize(self)
@@ -441,7 +437,6 @@ class BaseAuditModel(models.Model):
                debug_info=get_debug_info(),
            )
        else:

            if old_model:
                before_value = object_class.serialize(old_model)
            else:
@@ -450,7 +445,6 @@ class BaseAuditModel(models.Model):
            )
            # only create an audit entry if the values have changed
            if before_value != after_value and username:

                AuditLog.audit_object_changed(
                    username,
                    object_class.__name__.lower(),

@@ -16,8 +16,8 @@ class AuditLogSerializer(serializers.ModelSerializer):
            return SiteMinimumSerializer(
                Agent.objects.get(agent_id=obj.agent_id).site
            ).data
        else:
            return None

        return None

    class Meta:
        model = AuditLog

@@ -17,7 +17,6 @@ def handle_status(sender, instance: PendingAction, **kwargs):
        instance.action_type == PAAction.SCHED_REBOOT
        and instance.status == PAStatus.PENDING
    ):

        reboot_time = dt.datetime.strptime(
            instance.details["time"], "%Y-%m-%d %H:%M:%S"
        )

@@ -16,7 +16,6 @@ class TestAuditViews(TacticalTestCase):
        self.setup_coresettings()

    def create_audit_records(self):

        # create clients for client filter
        site = baker.make("clients.Site")
        agent1 = baker.make_recipe("agents.agent", site=site, hostname="AgentHostname1")
@@ -250,7 +249,7 @@ class TestAuditViews(TacticalTestCase):
            _quantity=4,
        )

        logs = baker.make(
        logs = baker.make(  # noqa
            "logs.DebugLog",
            log_type=DebugLogType.SYSTEM_ISSUES,
            log_level=cycle([i.value for i in DebugLogLevel]),
@@ -322,7 +321,6 @@ class TestAuditViews(TacticalTestCase):
        self.assertEqual(len(response.data["audit_logs"]), 63)

    def test_debuglog_permissions(self):

        # create data
        agent = baker.make_recipe("agents.agent")
        agent2 = baker.make_recipe("agents.agent")
@@ -391,8 +389,8 @@ class TestAuditViews(TacticalTestCase):
    def test_get_pendingaction_permissions(self):
        agent = baker.make_recipe("agents.agent")
        unauthorized_agent = baker.make_recipe("agents.agent")
        actions = baker.make("logs.PendingAction", agent=agent, _quantity=5)
        unauthorized_actions = baker.make(
        actions = baker.make("logs.PendingAction", agent=agent, _quantity=5)  # noqa
        unauthorized_actions = baker.make(  # noqa
            "logs.PendingAction", agent=unauthorized_agent, _quantity=7
        )


@@ -4,7 +4,7 @@ django-extensions
isort
types-pytz
django-silk
mypy
mypy==0.982
django-stubs
djangorestframework-stubs
django-types

@@ -5,4 +5,6 @@ pytest
pytest-django
pytest-xdist
pytest-cov
codecov
codecov
refurb
flake8
@@ -1,39 +1,40 @@
asgiref==3.5.2
adrf==0.1.1
asgiref==3.6.0
celery==5.2.7
certifi==2022.6.15.1
certifi==2022.12.7
cffi==1.15.1
channels==3.0.5
channels_redis==3.4.1
channels==4.0.0
channels_redis==4.0.0
chardet==4.0.0
cryptography==38.0.1
daphne==3.0.2
Django==4.1.1
django-cors-headers==3.13.0
django-ipware==4.0.2
cryptography==39.0.2
daphne==4.0.0
Django==4.1.7
django-cors-headers==3.14.0
django-ipware==5.0.0
django-rest-knox==4.2.0
djangorestframework==3.13.1
future==0.18.2
msgpack==1.0.4
nats-py==2.1.7
psutil==5.9.2
psycopg2-binary==2.9.3
djangorestframework==3.14.0
drf-spectacular==0.26.1
hiredis==2.2.2
meshctrl==0.1.15
msgpack==1.0.5
nats-py==2.2.0
packaging==23.0
psutil==5.9.4
psycopg2-binary==2.9.5
pycparser==2.21
pycryptodome==3.15.0
pyotp==2.7.0
pycryptodome==3.17
pyotp==2.8.0
pyparsing==3.0.9
pytz==2022.2.1
qrcode==7.3.1
redis==4.3.4
hiredis==2.0.0
requests==2.28.1
pytz==2022.7.1
qrcode==7.4.2
redis==4.3.5
requests==2.28.2
six==1.16.0
sqlparse==0.4.2
twilio==7.14.0
urllib3==1.26.12
uWSGI==2.0.20
sqlparse==0.4.3
twilio==7.16.5
urllib3==1.26.15
uWSGI==2.0.21
validators==0.20.0
vine==5.0.0
websockets==10.3
zipp==3.8.1
drf-spectacular==0.23.1
meshctrl==0.1.15
websockets==10.4
zipp==3.15.0

api/tacticalrmm/scripts/migrations/0019_script_env_vars.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# Generated by Django 4.1.3 on 2022-11-26 01:38

import django.contrib.postgres.fields
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("scripts", "0018_script_run_as_user"),
    ]

    operations = [
        migrations.AddField(
            model_name="script",
            name="env_vars",
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.TextField(blank=True, null=True),
                blank=True,
                default=list,
                null=True,
                size=None,
            ),
        ),
    ]

@@ -29,6 +29,12 @@ class Script(BaseAuditModel):
        blank=True,
        default=list,
    )
    env_vars = ArrayField(
        models.TextField(null=True, blank=True),
        null=True,
        blank=True,
        default=list,
    )
    syntax = TextField(null=True, blank=True)
    favorite = models.BooleanField(default=False)
    category = models.CharField(max_length=100, null=True, blank=True)
@@ -47,7 +53,7 @@ class Script(BaseAuditModel):

    @property
    def code_no_snippets(self):
        return self.script_body if self.script_body else ""
        return self.script_body or ""

    @property
    def code(self):
@@ -66,11 +72,12 @@ class Script(BaseAuditModel):
                else:
                    value = ""

                replaced_code = re.sub(snippet.group(), value, replaced_code)

                replaced_code = re.sub(
                    snippet.group(), value.replace("\\", "\\\\"), replaced_code
                )
            return replaced_code
        else:
            return code

        return code

    def hash_script_body(self):
        from django.conf import settings

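Note: the added value.replace("\\", "\\\\") matters because re.sub() treats backslashes in the replacement string as escape sequences, so a snippet body containing literal backslashes (for example a Windows path) would otherwise be mangled or raise re.error. A small standalone illustration, not taken from the repo:

import re

template = "echo {{path}}"
value = "C:\\temp\\file.txt"  # expanded snippet text containing literal backslashes

# Backslashes in re.sub's replacement are interpreted ("\t" becomes a tab, unknown
# escapes raise), so each literal backslash must be doubled to survive unchanged.
out = re.sub(r"\{\{path\}\}", value.replace("\\", "\\\\"), template)
print(out)  # echo C:\temp\file.txt
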
@@ -112,14 +119,14 @@ class Script(BaseAuditModel):
            else 90
        )

        args = script["args"] if "args" in script.keys() else list()
        args = script["args"] if "args" in script.keys() else []

        syntax = script["syntax"] if "syntax" in script.keys() else ""

        supported_platforms = (
            script["supported_platforms"]
            if "supported_platforms" in script.keys()
            else list()
            else []
        )

        # if community script exists update it
@@ -187,12 +194,11 @@ class Script(BaseAuditModel):
        return ScriptSerializer(script).data

    @classmethod
    def parse_script_args(cls, agent, shell: str, args: List[str] = list()) -> list:

    def parse_script_args(cls, agent, shell: str, args: List[str] = []) -> list:
        if not args:
            return []

        temp_args = list()
        temp_args = []

        # pattern to match for injection
        pattern = re.compile(".*\\{\\{(.*)\\}\\}.*")
@@ -206,7 +212,7 @@ class Script(BaseAuditModel):
            string=string,
            instance=agent,
            shell=shell,
            quotes=True if shell != ScriptShell.CMD else False,
            quotes=shell != ScriptShell.CMD,
        )

        if value:

@@ -7,5 +7,5 @@ class ScriptsPerms(permissions.BasePermission):
    def has_permission(self, r, view) -> bool:
        if r.method == "GET":
            return _has_perm(r, "can_list_scripts")
        else:
            return _has_perm(r, "can_manage_scripts")

        return _has_perm(r, "can_manage_scripts")

@@ -21,6 +21,7 @@ class ScriptTableSerializer(ModelSerializer):
            "hidden",
            "supported_platforms",
            "run_as_user",
            "env_vars",
        ]


@@ -45,6 +46,7 @@ class ScriptSerializer(ModelSerializer):
            "hidden",
            "supported_platforms",
            "run_as_user",
            "env_vars",
        ]


@@ -54,7 +56,7 @@ class ScriptCheckSerializer(ModelSerializer):

    class Meta:
        model = Script
        fields = ["code", "shell", "run_as_user", "script_hash"]
        fields = ["code", "shell", "run_as_user", "env_vars", "script_hash"]

class ScriptSnippetSerializer(ModelSerializer):


@@ -1,10 +1,17 @@
import asyncio
from typing import List
from typing import TYPE_CHECKING, List

import msgpack
import nats

from agents.models import Agent, AgentHistory
from scripts.models import Script
from tacticalrmm.celery import app
from tacticalrmm.constants import AgentHistoryType
from tacticalrmm.helpers import setup_nats_options

if TYPE_CHECKING:
    from nats.aio.client import Client as NATSClient


@app.task
@@ -16,6 +23,7 @@ def handle_bulk_command_task(
    username,
    run_as_user: bool = False,
) -> None:
    items = []
    nats_data = {
        "func": "rawcmd",
        "timeout": timeout,
@@ -33,9 +41,26 @@ def handle_bulk_command_task(
            command=cmd,
            username=username,
        )
        nats_data["id"] = hist.pk
        tmp = {**nats_data}
        tmp["id"] = hist.pk
        items.append((agent.agent_id, tmp))

        asyncio.run(agent.nats_cmd(nats_data, wait=False))
    async def _run_cmd(nc: "NATSClient", sub, data) -> None:
        await nc.publish(subject=sub, payload=msgpack.dumps(data))

    async def _run() -> None:
        opts = setup_nats_options()
        try:
            nc = await nats.connect(**opts)
        except Exception as e:
            print(e)
            return

        tasks = [_run_cmd(nc=nc, sub=item[0], data=item[1]) for item in items]
        await asyncio.gather(*tasks)
        await nc.close()

    asyncio.run(_run())


@app.task
@@ -46,6 +71,7 @@ def handle_bulk_script_task(
    timeout: int,
    username: str,
    run_as_user: bool = False,
    env_vars: list[str] = [],
) -> None:
    script = Script.objects.get(pk=scriptpk)
    agent: "Agent"
@@ -62,4 +88,5 @@ def handle_bulk_script_task(
    timeout=timeout,
    history_pk=hist.pk,
    run_as_user=run_as_user,
    env_vars=env_vars,
    )

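Note: env_vars: list[str] = [] in handle_bulk_script_task uses a mutable default argument, a well-known Python pitfall. It is harmless as long as the list is never mutated inside the task, but the conventional spelling sidesteps the question entirely. Illustrative sketch only, with the signature reduced to the parameter being discussed:

from typing import Optional

def handle_bulk_script_task(env_vars: Optional[list[str]] = None) -> None:
    # normalize once at the top so a shared default list can never be mutated
    env_vars = env_vars if env_vars is not None else []
    ...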