Compare commits

144 commits (SHA1):

eca8f32570, 8d1ef19c61, 71d87d866b, c4f88bdce7, f722a115b1, 1583beea7b, 5b388c587b, e254923167,
b0dbdd7803, aa6ebe0122, c5f179bab8, e65cb86638, a349998640, 43f60610b8, 46d042087a, ee214727f6,
b4c1ec55ec, 0fdd54f710, 4f0cdeaec0, e5cc38857c, fe4b9d71c0, 5c1181e40e, 8b71832bc2, 8412ed6065,
207f6cdc7c, b0b51f5730, def6833ef0, c528dd3de1, 544270e35d, 657e029fee, 49469d7689, 4f0dd452c8,
3f741eab11, 190368788f, 8306a3f566, 988c134c09, af0a4d578b, 9bc0abc831, 41410e99e7, deae04d5ff,
7d6eeffd66, 629858e095, dfdb628347, 6e48b28fc9, 3ba450e837, 688ed93500, 7268ba20a2, 63d9e73098,
564c048f90, 5f801c74d5, b405fbc09a, 7a64c2eb49, c93cbac3b1, 8b0f67b8a6, 0d96129f2d, 54ee12d2b3,
92fc042103, 9bb7016fa7, 3ad56feafb, 14d59c3dec, 443f419770, ddbb58755e, 524283b9ff, fb178d2944,
52f4ad9403, ba0c08ef1f, 9e19b1e04c, b2118201b1, b4346aa056, b599f05aab, 93d78a0200, 449957b2eb,
0a6d44bad3, 17ceaaa503, d70803b416, aa414d4702, f24e1b91ea, 1df8163090, 659ddf6a45, e110068da4,
c943f6f936, cb1fe7fe54, 593f1f63cc, 66aa70cf75, 304be99067, 9a01ec35f4, bfa5b4fba5, d2f63ef353,
50f334425e, f78212073c, 5c655f5a82, 6a6446bfcb, b60a3a5e50, 02ccbab8e5, 023ff3f964, 7c5e8df3b8,
56fdab260b, 7cce49dc1a, 2dfaafb20b, 6138a5bf54, 828c67cc00, e70cd44e18, efa5ac5edd, 788b11e759,
d049d7a61f, 075c833b58, e9309c2a96, a592d2b397, 3ad1805ac0, dbc2bab698, 79eec5c299, 7754b0c575,
be4289ce76, 67f5226270, b6d77c581b, d84bf47d04, aba3a7bb9e, 6281736d89, 94d96f89d3, 4b55f9dead,
5c6dce94df, f7d8f9c7f5, 053df24f9c, 1dc470e434, cfd8773267, 67045cf6c1, ddfb9e7239, 9f6eed5472,
15a1e2ebcb, fcfe450b07, a69bbb3bc9, 6d2559cfc1, b3a62615f3, 57f5cca1cb, 6b9851f540, 36fd203a88,
3f5cb5d61c, 862fc6a946, 92c386ac0e, 98a11a3645, 62be0ed936, b7de73fd8a, e2413f1af2, 0e77d575c4
@@ -26,6 +26,6 @@ POSTGRES_PASS=postgrespass
APP_PORT=80
API_PORT=80
HTTP_PROTOCOL=https
DOCKER_NETWORK="172.21.0.0/24"
DOCKER_NGINX_IP="172.21.0.20"
NATS_PORTS="4222:4222"
DOCKER_NETWORK=172.21.0.0/24
DOCKER_NGINX_IP=172.21.0.20
NATS_PORTS=4222:4222
@@ -1,4 +1,4 @@
FROM python:3.9.2-slim
FROM python:3.9.6-slim
ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
@@ -3,6 +3,7 @@ asyncio-nats-client
celery
channels
channels_redis
django-ipware
Django
django-cors-headers
django-rest-knox
@@ -9,7 +9,7 @@ Tactical RMM is a remote monitoring & management tool for Windows computers, bui
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
# [LIVE DEMO](https://rmm.tacticalrmm.io/)
Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.
### [Discord Chat](https://discord.gg/upGTkWp)

@@ -35,4 +35,4 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
## Installation / Backup / Restore / Usage
### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)
### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)
@@ -0,0 +1,18 @@
# Generated by Django 3.2.1 on 2021-07-20 20:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0023_user_is_installer_user'),
]
operations = [
migrations.AddField(
model_name='user',
name='last_login_ip',
field=models.GenericIPAddressField(blank=True, default=None, null=True),
),
]
@@ -0,0 +1,33 @@
# Generated by Django 3.2.1 on 2021-07-21 04:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0024_user_last_login_ip'),
]
operations = [
migrations.AddField(
model_name='role',
name='created_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='role',
name='created_time',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='role',
name='modified_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='role',
name='modified_time',
field=models.DateTimeField(auto_now=True, null=True),
),
]
@@ -48,6 +48,7 @@ class User(AbstractUser, BaseAuditModel):
loading_bar_color = models.CharField(max_length=255, default="red")
clear_search_when_switching = models.BooleanField(default=True)
is_installer_user = models.BooleanField(default=False)
last_login_ip = models.GenericIPAddressField(default=None, blank=True, null=True)
agent = models.OneToOneField(
"agents.Agent",

@@ -73,7 +74,7 @@ class User(AbstractUser, BaseAuditModel):
return UserSerializer(user).data
class Role(models.Model):
class Role(BaseAuditModel):
name = models.CharField(max_length=255, unique=True)
is_superuser = models.BooleanField(default=False)

@@ -140,6 +141,13 @@ class Role(models.Model):
def __str__(self):
return self.name
@staticmethod
def serialize(role):
# serializes the agent and returns json
from .serializers import RoleAuditSerializer
return RoleAuditSerializer(role).data
@staticmethod
def perms():
return [
@@ -31,6 +31,7 @@ class UserSerializer(ModelSerializer):
"email",
"is_active",
"last_login",
"last_login_ip",
"role",
]

@@ -57,3 +58,9 @@ class RoleSerializer(ModelSerializer):
class Meta:
model = Role
fields = "__all__"
class RoleAuditSerializer(ModelSerializer):
class Meta:
model = Role
fields = "__all__"
@@ -3,23 +3,23 @@ from django.conf import settings
from django.contrib.auth import login
from django.db import IntegrityError
from django.shortcuts import get_object_or_404
from ipware import get_client_ip
from knox.views import LoginView as KnoxLoginView
from logs.models import AuditLog
from rest_framework import status
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from logs.models import AuditLog
from tacticalrmm.utils import notify_error
from .models import User, Role
from .models import Role, User
from .permissions import AccountsPerms, RolesPerms
from .serializers import (
RoleSerializer,
TOTPSetupSerializer,
UserSerializer,
UserUISerializer,
RoleSerializer,
)

@@ -40,7 +40,9 @@ class CheckCreds(KnoxLoginView):
# check credentials
serializer = AuthTokenSerializer(data=request.data)
if not serializer.is_valid():
AuditLog.audit_user_failed_login(request.data["username"])
AuditLog.audit_user_failed_login(
request.data["username"], debug_info={"ip": request._client_ip}
)
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
user = serializer.validated_data["user"]

@@ -76,10 +78,20 @@ class LoginView(KnoxLoginView):
if valid:
login(request, user)
AuditLog.audit_user_login_successful(request.data["username"])
# save ip information
client_ip, is_routable = get_client_ip(request)
user.last_login_ip = client_ip
user.save()
AuditLog.audit_user_login_successful(
request.data["username"], debug_info={"ip": request._client_ip}
)
return super(LoginView, self).post(request, format=None)
else:
AuditLog.audit_user_failed_twofactor(request.data["username"])
AuditLog.audit_user_failed_twofactor(
request.data["username"], debug_info={"ip": request._client_ip}
)
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
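For illustration only (not part of the diff): the LoginView change above resolves the client IP with django-ipware and stores it on the new `User.last_login_ip` field, roughly like this minimal sketch.

```python
# Minimal sketch of the pattern used in LoginView above: django-ipware resolves
# a best-guess client IP, which is saved on the new last_login_ip field and
# also attached to the audit entry as debug_info.
from ipware import get_client_ip

def record_login_ip(request, user):
    client_ip, is_routable = get_client_ip(request)  # client_ip may be None
    user.last_login_ip = client_ip
    user.save()
    return {"ip": client_ip}
```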
@@ -87,7 +99,14 @@ class GetAddUsers(APIView):
permission_classes = [IsAuthenticated, AccountsPerms]
def get(self, request):
users = User.objects.filter(agent=None, is_installer_user=False)
search = request.GET.get("search", None)
if search:
users = User.objects.filter(agent=None, is_installer_user=False).filter(
username__icontains=search
)
else:
users = User.objects.filter(agent=None, is_installer_user=False)
return Response(UserSerializer(users, many=True).data)
@@ -1,8 +1,9 @@
from django.contrib import admin
from .models import Agent, AgentCustomField, Note, RecoveryAction
from .models import Agent, AgentCustomField, Note, RecoveryAction, AgentHistory
admin.site.register(Agent)
admin.site.register(RecoveryAction)
admin.site.register(Note)
admin.site.register(AgentCustomField)
admin.site.register(AgentHistory)
api/tacticalrmm/agents/migrations/0038_agenthistory.py (new file)
@@ -0,0 +1,27 @@
# Generated by Django 3.2.1 on 2021-07-06 02:01
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('agents', '0037_auto_20210627_0014'),
]
operations = [
migrations.CreateModel(
name='AgentHistory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('time', models.DateTimeField(auto_now_add=True)),
('type', models.CharField(choices=[('task_run', 'Task Run'), ('script_run', 'Script Run'), ('cmd_run', 'CMD Run')], default='cmd_run', max_length=50)),
('command', models.TextField(blank=True, null=True)),
('status', models.CharField(choices=[('success', 'Success'), ('failure', 'Failure')], default='success', max_length=50)),
('username', models.CharField(default='system', max_length=50)),
('results', models.TextField(blank=True, null=True)),
('agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='history', to='agents.agent')),
],
),
]
api/tacticalrmm/agents/migrations/0039_auto_20210714_0738.py (new file)
@@ -0,0 +1,25 @@
# Generated by Django 3.2.5 on 2021-07-14 07:38
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('scripts', '0008_script_guid'),
('agents', '0038_agenthistory'),
]
operations = [
migrations.AddField(
model_name='agenthistory',
name='script',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='history', to='scripts.script'),
),
migrations.AddField(
model_name='agenthistory',
name='script_results',
field=models.JSONField(blank=True, null=True),
),
]
@@ -16,14 +16,12 @@ from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils import timezone as djangotime
from loguru import logger
from nats.aio.client import Client as NATS
from nats.aio.errors import ErrTimeout
from packaging import version as pyver
from core.models import TZ_CHOICES, CoreSettings
from logs.models import BaseAuditModel
logger.configure(**settings.LOG_CONFIG)
from logs.models import BaseAuditModel, DebugLog
class Agent(BaseAuditModel):

@@ -91,8 +89,8 @@ class Agent(BaseAuditModel):
def save(self, *args, **kwargs):
# get old agent if exists
old_agent = type(self).objects.get(pk=self.pk) if self.pk else None
super(BaseAuditModel, self).save(*args, **kwargs)
old_agent = Agent.objects.get(pk=self.pk) if self.pk else None
super(Agent, self).save(old_model=old_agent, *args, **kwargs)
# check if new agent has been created
# or check if policy have changed on agent

@@ -123,7 +121,7 @@ class Agent(BaseAuditModel):
else:
from core.models import CoreSettings
return CoreSettings.objects.first().default_time_zone
return CoreSettings.objects.first().default_time_zone # type: ignore
@property
def arch(self):
@@ -325,6 +323,7 @@ class Agent(BaseAuditModel):
full: bool = False,
wait: bool = False,
run_on_any: bool = False,
history_pk: int = 0,
) -> Any:
from scripts.models import Script

@@ -343,6 +342,9 @@ class Agent(BaseAuditModel):
},
}
if history_pk != 0 and pyver.parse(self.version) >= pyver.parse("1.6.0"):
data["id"] = history_pk
running_agent = self
if run_on_any:
nats_ping = {"func": "ping"}

@@ -411,6 +413,12 @@ class Agent(BaseAuditModel):
update.action = "approve"
update.save(update_fields=["action"])
DebugLog.info(
agent=self,
log_type="windows_updates",
message=f"Approving windows updates on {self.hostname}",
)
# returns agent policy merged with a client or site specific policy
def get_patch_policy(self):
@@ -445,8 +453,8 @@ class Agent(BaseAuditModel):
# if patch policy still doesn't exist check default policy
elif (
core_settings.server_policy
and core_settings.server_policy.winupdatepolicy.exists()
core_settings.server_policy # type: ignore
and core_settings.server_policy.winupdatepolicy.exists() # type: ignore
):
# make sure agent site and client are not blocking inheritance
if (

@@ -454,7 +462,7 @@ class Agent(BaseAuditModel):
and not site.block_policy_inheritance
and not site.client.block_policy_inheritance
):
patch_policy = core_settings.server_policy.winupdatepolicy.get()
patch_policy = core_settings.server_policy.winupdatepolicy.get() # type: ignore
elif self.monitoring_type == "workstation":
# check agent policy first which should override client or site policy

@@ -483,8 +491,8 @@ class Agent(BaseAuditModel):
# if patch policy still doesn't exist check default policy
elif (
core_settings.workstation_policy
and core_settings.workstation_policy.winupdatepolicy.exists()
core_settings.workstation_policy # type: ignore
and core_settings.workstation_policy.winupdatepolicy.exists() # type: ignore
):
# make sure agent site and client are not blocking inheritance
if (

@@ -493,7 +501,7 @@ class Agent(BaseAuditModel):
and not site.client.block_policy_inheritance
):
patch_policy = (
core_settings.workstation_policy.winupdatepolicy.get()
core_settings.workstation_policy.winupdatepolicy.get() # type: ignore
)
# if policy still doesn't exist return the agent patch policy
@@ -608,35 +616,35 @@ class Agent(BaseAuditModel):
# check if alert template is applied globally and return
if (
core.alert_template
and core.alert_template.is_active
core.alert_template # type: ignore
and core.alert_template.is_active # type: ignore
and not self.block_policy_inheritance
and not site.block_policy_inheritance
and not client.block_policy_inheritance
):
templates.append(core.alert_template)
templates.append(core.alert_template) # type: ignore
# if agent is a workstation, check if policy with alert template is assigned to the site, client, or core
if (
self.monitoring_type == "server"
and core.server_policy
and core.server_policy.alert_template
and core.server_policy.alert_template.is_active
and core.server_policy # type: ignore
and core.server_policy.alert_template # type: ignore
and core.server_policy.alert_template.is_active # type: ignore
and not self.block_policy_inheritance
and not site.block_policy_inheritance
and not client.block_policy_inheritance
):
templates.append(core.server_policy.alert_template)
templates.append(core.server_policy.alert_template) # type: ignore
if (
self.monitoring_type == "workstation"
and core.workstation_policy
and core.workstation_policy.alert_template
and core.workstation_policy.alert_template.is_active
and core.workstation_policy # type: ignore
and core.workstation_policy.alert_template # type: ignore
and core.workstation_policy.alert_template.is_active # type: ignore
and not self.block_policy_inheritance
and not site.block_policy_inheritance
and not client.block_policy_inheritance
):
templates.append(core.workstation_policy.alert_template)
templates.append(core.workstation_policy.alert_template) # type: ignore
# go through the templates and return the first one that isn't excluded
for template in templates:
@@ -739,7 +747,7 @@ class Agent(BaseAuditModel):
try:
ret = msgpack.loads(msg.data) # type: ignore
except Exception as e:
logger.error(e)
DebugLog.error(agent=self, log_type="agent_issues", message=e)
ret = str(e)
await nc.close()

@@ -752,12 +760,9 @@ class Agent(BaseAuditModel):
@staticmethod
def serialize(agent):
# serializes the agent and returns json
from .serializers import AgentEditSerializer
from .serializers import AgentAuditSerializer
ret = AgentEditSerializer(agent).data
del ret["all_timezones"]
del ret["client"]
return ret
return AgentAuditSerializer(agent).data
def delete_superseded_updates(self):
try:

@@ -772,7 +777,7 @@ class Agent(BaseAuditModel):
# skip if no version info is available therefore nothing to parse
try:
vers = [
re.search(r"\(Version(.*?)\)", i).group(1).strip()
re.search(r"\(Version(.*?)\)", i).group(1).strip() # type: ignore
for i in titles
]
sorted_vers = sorted(vers, key=LooseVersion)
@@ -807,7 +812,7 @@ class Agent(BaseAuditModel):
from core.models import CoreSettings
CORE = CoreSettings.objects.first()
CORE.send_mail(
CORE.send_mail( # type: ignore
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
(
f"Data has not been received from client {self.client.name}, "

@@ -822,7 +827,7 @@ class Agent(BaseAuditModel):
from core.models import CoreSettings
CORE = CoreSettings.objects.first()
CORE.send_mail(
CORE.send_mail( # type: ignore
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
(
f"Data has been received from client {self.client.name}, "

@@ -837,7 +842,7 @@ class Agent(BaseAuditModel):
from core.models import CoreSettings
CORE = CoreSettings.objects.first()
CORE.send_sms(
CORE.send_sms( # type: ignore
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
alert_template=self.alert_template,
)

@@ -846,7 +851,7 @@ class Agent(BaseAuditModel):
from core.models import CoreSettings
CORE = CoreSettings.objects.first()
CORE.send_sms(
CORE.send_sms( # type: ignore
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
alert_template=self.alert_template,
)
@@ -928,3 +933,57 @@ class AgentCustomField(models.Model):
return self.bool_value
else:
return self.string_value
def save_to_field(self, value):
if self.field.type in [
"text",
"number",
"single",
"datetime",
]:
self.string_value = value
self.save()
elif self.field.type == "multiple":
self.multiple_value = value.split(",")
self.save()
elif self.field.type == "checkbox":
self.bool_value = bool(value)
self.save()
AGENT_HISTORY_TYPES = (
("task_run", "Task Run"),
("script_run", "Script Run"),
("cmd_run", "CMD Run"),
)
AGENT_HISTORY_STATUS = (("success", "Success"), ("failure", "Failure"))
class AgentHistory(models.Model):
agent = models.ForeignKey(
Agent,
related_name="history",
on_delete=models.CASCADE,
)
time = models.DateTimeField(auto_now_add=True)
type = models.CharField(
max_length=50, choices=AGENT_HISTORY_TYPES, default="cmd_run"
)
command = models.TextField(null=True, blank=True)
status = models.CharField(
max_length=50, choices=AGENT_HISTORY_STATUS, default="success"
)
username = models.CharField(max_length=50, default="system")
results = models.TextField(null=True, blank=True)
script = models.ForeignKey(
"scripts.Script",
null=True,
blank=True,
related_name="history",
on_delete=models.SET_NULL,
)
script_results = models.JSONField(null=True, blank=True)
def __str__(self):
return f"{self.agent.hostname} - {self.type}"
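For context, a minimal sketch (not part of the diff) of how the new AgentHistory model above is used elsewhere in this changeset: a row is created when a command or script is run, and old rows are removed by the new prune task.

```python
# Sketch only: create a history row for a script run (as run_script() in
# agents/views.py does for agents >= 1.6.0), then prune entries older than
# 30 days the same way prune_agent_history() does.
from django.utils import timezone as djangotime
from agents.models import AgentHistory

hist = AgentHistory.objects.create(
    agent=agent,                # an agents.models.Agent instance
    type="script_run",          # one of AGENT_HISTORY_TYPES
    script=script,              # a scripts.models.Script instance
    username=request.user.username[:50],
)
# hist.pk is what gets passed down as Agent.run_script(history_pk=hist.pk)

AgentHistory.objects.filter(
    time__lt=djangotime.now() - djangotime.timedelta(days=30)
).delete()
```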
@@ -1,10 +1,10 @@
import pytz
from rest_framework import serializers
from clients.serializers import ClientSerializer
from rest_framework import serializers
from tacticalrmm.utils import get_default_timezone
from winupdate.serializers import WinUpdatePolicySerializer
from .models import Agent, AgentCustomField, Note
from .models import Agent, AgentCustomField, Note, AgentHistory
class AgentSerializer(serializers.ModelSerializer):

@@ -159,6 +159,7 @@ class AgentEditSerializer(serializers.ModelSerializer):
"offline_time",
"overdue_text_alert",
"overdue_email_alert",
"overdue_dashboard_alert",
"all_timezones",
"winupdatepolicy",
"policy",

@@ -200,3 +201,22 @@ class NotesSerializer(serializers.ModelSerializer):
class Meta:
model = Agent
fields = ["hostname", "pk", "notes"]
class AgentHistorySerializer(serializers.ModelSerializer):
time = serializers.SerializerMethodField(read_only=True)
script_name = serializers.ReadOnlyField(source="script.name")
class Meta:
model = AgentHistory
fields = "__all__"
def get_time(self, history):
timezone = get_default_timezone()
return history.time.astimezone(timezone).strftime("%m %d %Y %H:%M:%S")
class AgentAuditSerializer(serializers.ModelSerializer):
class Meta:
model = Agent
exclude = ["disks", "services", "wmi_detail"]
@@ -1,26 +1,21 @@
import asyncio
import datetime as dt
import random
import tempfile
import json
import subprocess
import urllib.parse
from time import sleep
from typing import Union
from alerts.models import Alert
from core.models import CodeSignToken, CoreSettings
from django.conf import settings
from django.utils import timezone as djangotime
from loguru import logger
from logs.models import DebugLog, PendingAction
from packaging import version as pyver
from agents.models import Agent
from core.models import CodeSignToken, CoreSettings
from logs.models import PendingAction
from scripts.models import Script
from tacticalrmm.celery import app
from tacticalrmm.utils import run_nats_api_cmd
logger.configure(**settings.LOG_CONFIG)
from agents.models import Agent
def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str:
@@ -33,8 +28,10 @@ def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str
# skip if we can't determine the arch
if agent.arch is None:
logger.warning(
f"Unable to determine arch on {agent.hostname}. Skipping agent update."
DebugLog.warning(
agent=agent,
log_type="agent_issues",
message=f"Unable to determine arch on {agent.hostname}({agent.pk}). Skipping agent update.",
)
return "noarch"

@@ -81,7 +78,7 @@ def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str
@app.task
def force_code_sign(pks: list[int]) -> None:
try:
token = CodeSignToken.objects.first().token
token = CodeSignToken.objects.first().tokenv # type:ignore
except:
return

@@ -96,7 +93,7 @@ def force_code_sign(pks: list[int]) -> None:
@app.task
def send_agent_update_task(pks: list[int]) -> None:
try:
codesigntoken = CodeSignToken.objects.first().token
codesigntoken = CodeSignToken.objects.first().token # type:ignore
except:
codesigntoken = None

@@ -111,11 +108,11 @@ def send_agent_update_task(pks: list[int]) -> None:
@app.task
def auto_self_agent_update_task() -> None:
core = CoreSettings.objects.first()
if not core.agent_auto_update:
if not core.agent_auto_update: # type:ignore
return
try:
codesigntoken = CodeSignToken.objects.first().token
codesigntoken = CodeSignToken.objects.first().token # type:ignore
except:
codesigntoken = None
@@ -235,14 +232,24 @@ def run_script_email_results_task(
nats_timeout: int,
emails: list[str],
args: list[str] = [],
history_pk: int = 0,
):
agent = Agent.objects.get(pk=agentpk)
script = Script.objects.get(pk=scriptpk)
r = agent.run_script(
scriptpk=script.pk, args=args, full=True, timeout=nats_timeout, wait=True
scriptpk=script.pk,
args=args,
full=True,
timeout=nats_timeout,
wait=True,
history_pk=history_pk,
)
if r == "timeout":
logger.error(f"{agent.hostname} timed out running script.")
DebugLog.error(
agent=agent,
log_type="scripting",
message=f"{agent.hostname}({agent.pk}) timed out running script.",
)
return
CORE = CoreSettings.objects.first()

@@ -258,28 +265,32 @@ def run_script_email_results_task(
msg = EmailMessage()
msg["Subject"] = subject
msg["From"] = CORE.smtp_from_email
msg["From"] = CORE.smtp_from_email # type:ignore
if emails:
msg["To"] = ", ".join(emails)
else:
msg["To"] = ", ".join(CORE.email_alert_recipients)
msg["To"] = ", ".join(CORE.email_alert_recipients) # type:ignore
msg.set_content(body)
try:
with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
if CORE.smtp_requires_auth:
with smtplib.SMTP(
CORE.smtp_host, CORE.smtp_port, timeout=20 # type:ignore
) as server: # type:ignore
if CORE.smtp_requires_auth: # type:ignore
server.ehlo()
server.starttls()
server.login(CORE.smtp_host_user, CORE.smtp_host_password)
server.login(
CORE.smtp_host_user, CORE.smtp_host_password # type:ignore
) # type:ignore
server.send_message(msg)
server.quit()
else:
server.send_message(msg)
server.quit()
except Exception as e:
logger.error(e)
DebugLog.error(message=e)
@app.task
@@ -310,15 +321,6 @@ def clear_faults_task(older_than_days: int) -> None:
)
@app.task
def monitor_agents_task() -> None:
agents = Agent.objects.only(
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
)
ids = [i.agent_id for i in agents if i.status != "online"]
run_nats_api_cmd("monitor", ids)
@app.task
def get_wmi_task() -> None:
agents = Agent.objects.only(

@@ -330,18 +332,62 @@ def get_wmi_task() -> None:
@app.task
def agent_checkin_task() -> None:
db = settings.DATABASES["default"]
config = {
"key": settings.SECRET_KEY,
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
"user": db["USER"],
"pass": db["PASSWORD"],
"host": db["HOST"],
"port": int(db["PORT"]),
"dbname": db["NAME"],
}
with tempfile.NamedTemporaryFile() as fp:
with open(fp.name, "w") as f:
json.dump(config, f)
cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", "checkin"]
subprocess.run(cmd, timeout=30)
run_nats_api_cmd("checkin", timeout=30)
@app.task
def agent_getinfo_task() -> None:
run_nats_api_cmd("agentinfo", timeout=30)
@app.task
def prune_agent_history(older_than_days: int) -> str:
from .models import AgentHistory
AgentHistory.objects.filter(
time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
).delete()
return "ok"
@app.task
def handle_agents_task() -> None:
q = Agent.objects.prefetch_related("pendingactions", "autotasks").only(
"pk", "agent_id", "version", "last_seen", "overdue_time", "offline_time"
)
agents = [
i
for i in q
if pyver.parse(i.version) >= pyver.parse("1.6.0") and i.status == "online"
]
for agent in agents:
# change agent update pending status to completed if agent has just updated
if (
pyver.parse(agent.version) == pyver.parse(settings.LATEST_AGENT_VER)
and agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).exists()
):
agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).update(status="completed")
# sync scheduled tasks
if agent.autotasks.exclude(sync_status="synced").exists(): # type: ignore
tasks = agent.autotasks.exclude(sync_status="synced") # type: ignore
for task in tasks:
if task.sync_status == "pendingdeletion":
task.delete_task_on_agent()
elif task.sync_status == "initial":
task.modify_task_on_agent()
elif task.sync_status == "notsynced":
task.create_task_on_agent()
# handles any alerting actions
if Alert.objects.filter(agent=agent, resolved=False).exists():
try:
Alert.handle_alert_resolve(agent)
except:
continue
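The checkin/monitor/agentinfo tasks above now delegate to `run_nats_api_cmd` from `tacticalrmm.utils`. That helper is not shown in this compare; judging only from the inline code it replaces in `agent_checkin_task`, it presumably does something along these lines (a sketch, not the actual implementation).

```python
# Hypothetical sketch of run_nats_api_cmd, inferred from the inline code the
# diff removes from agent_checkin_task above; the real helper lives in
# tacticalrmm/utils.py and may differ.
import json
import subprocess
import tempfile

from django.conf import settings

def run_nats_api_cmd(mode: str, ids: list[str] = None, timeout: int = 30) -> None:
    db = settings.DATABASES["default"]
    config = {
        "key": settings.SECRET_KEY,
        "natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
        "user": db["USER"],
        "pass": db["PASSWORD"],
        "host": db["HOST"],
        "port": int(db["PORT"]),
        "dbname": db["NAME"],
    }
    if ids:
        config["agents"] = ids  # assumption: monitor mode needs the agent ids
    with tempfile.NamedTemporaryFile() as fp:
        with open(fp.name, "w") as f:
            json.dump(config, f)
        cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", mode]
        subprocess.run(cmd, timeout=timeout)
```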
@@ -1,19 +1,18 @@
import json
import os
from itertools import cycle
from django.utils import timezone as djangotime
from unittest.mock import patch
from django.conf import settings
from logs.models import PendingAction
from model_bakery import baker
from packaging import version as pyver
from logs.models import PendingAction
from tacticalrmm.test import TacticalTestCase
from winupdate.models import WinUpdatePolicy
from winupdate.serializers import WinUpdatePolicySerializer
from .models import Agent, AgentCustomField
from .serializers import AgentSerializer
from .models import Agent, AgentCustomField, AgentHistory
from .serializers import AgentHistorySerializer, AgentSerializer
from .tasks import auto_self_agent_update_task

@@ -306,7 +305,7 @@ class TestAgentViews(TacticalTestCase):
"shell": "cmd",
"timeout": 30,
}
mock_ret.return_value = "nt authority\system"
mock_ret.return_value = "nt authority\\system"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.assertIsInstance(r.data, str) # type: ignore

@@ -437,7 +436,7 @@ class TestAgentViews(TacticalTestCase):
self.assertEqual(r.status_code, 200)
self.assertEqual(RecoveryAction.objects.count(), 1)
mesh_recovery = RecoveryAction.objects.first()
self.assertEqual(mesh_recovery.mode, "mesh")
self.assertEqual(mesh_recovery.mode, "mesh") # type: ignore
nats_cmd.reset_mock()
RecoveryAction.objects.all().delete()

@@ -472,8 +471,8 @@ class TestAgentViews(TacticalTestCase):
self.assertEqual(r.status_code, 200)
self.assertEqual(RecoveryAction.objects.count(), 1)
cmd_recovery = RecoveryAction.objects.first()
self.assertEqual(cmd_recovery.mode, "command")
self.assertEqual(cmd_recovery.command, "shutdown /r /t 10 /f")
self.assertEqual(cmd_recovery.mode, "command") # type: ignore
self.assertEqual(cmd_recovery.command, "shutdown /r /t 10 /f") # type: ignore
def test_agents_agent_detail(self):
url = f"/agents/{self.agent.pk}/agentdetail/"
@@ -770,6 +769,9 @@ class TestAgentViews(TacticalTestCase):
@patch("agents.tasks.run_script_email_results_task.delay")
@patch("agents.models.Agent.run_script")
def test_run_script(self, run_script, email_task):
from .models import AgentCustomField, Note
from clients.models import ClientCustomField, SiteCustomField
run_script.return_value = "ok"
url = "/agents/runscript/"
script = baker.make_recipe("scripts.script")

@@ -777,7 +779,7 @@ class TestAgentViews(TacticalTestCase):
# test wait
data = {
"pk": self.agent.pk,
"scriptPK": script.pk,
"script": script.pk,
"output": "wait",
"args": [],
"timeout": 15,

@@ -786,18 +788,18 @@ class TestAgentViews(TacticalTestCase):
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk, args=[], timeout=18, wait=True
scriptpk=script.pk, args=[], timeout=18, wait=True, history_pk=0
)
run_script.reset_mock()
# test email default
data = {
"pk": self.agent.pk,
"scriptPK": script.pk,
"script": script.pk,
"output": "email",
"args": ["abc", "123"],
"timeout": 15,
"emailmode": "default",
"emailMode": "default",
"emails": ["admin@example.com", "bob@example.com"],
}
r = self.client.post(url, data, format="json")

@@ -812,7 +814,7 @@ class TestAgentViews(TacticalTestCase):
email_task.reset_mock()
# test email overrides
data["emailmode"] = "custom"
data["emailMode"] = "custom"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
email_task.assert_called_with(

@@ -826,7 +828,7 @@ class TestAgentViews(TacticalTestCase):
# test fire and forget
data = {
"pk": self.agent.pk,
"scriptPK": script.pk,
"script": script.pk,
"output": "forget",
"args": ["hello", "world"],
"timeout": 22,
@@ -835,8 +837,138 @@ class TestAgentViews(TacticalTestCase):
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk, args=["hello", "world"], timeout=25
scriptpk=script.pk, args=["hello", "world"], timeout=25, history_pk=0
)
run_script.reset_mock()
# test collector
# save to agent custom field
custom_field = baker.make("core.CustomField", model="agent")
data = {
"pk": self.agent.pk,
"script": script.pk,
"output": "collector",
"args": ["hello", "world"],
"timeout": 22,
"custom_field": custom_field.id, # type: ignore
"save_all_output": True,
}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
timeout=25,
wait=True,
history_pk=0,
)
run_script.reset_mock()
self.assertEqual(
AgentCustomField.objects.get(agent=self.agent.pk, field=custom_field).value,
"ok",
)
# save to site custom field
custom_field = baker.make("core.CustomField", model="site")
data = {
"pk": self.agent.pk,
"script": script.pk,
"output": "collector",
"args": ["hello", "world"],
"timeout": 22,
"custom_field": custom_field.id, # type: ignore
"save_all_output": False,
}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
timeout=25,
wait=True,
history_pk=0,
)
run_script.reset_mock()
self.assertEqual(
SiteCustomField.objects.get(
site=self.agent.site.pk, field=custom_field
).value,
"ok",
)
# save to client custom field
custom_field = baker.make("core.CustomField", model="client")
data = {
"pk": self.agent.pk,
"script": script.pk,
"output": "collector",
"args": ["hello", "world"],
"timeout": 22,
"custom_field": custom_field.id, # type: ignore
"save_all_output": False,
}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
timeout=25,
wait=True,
history_pk=0,
)
run_script.reset_mock()
self.assertEqual(
ClientCustomField.objects.get(
client=self.agent.client.pk, field=custom_field
).value,
"ok",
)
# test save to note
data = {
"pk": self.agent.pk,
"script": script.pk,
"output": "note",
"args": ["hello", "world"],
"timeout": 22,
}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
timeout=25,
wait=True,
history_pk=0,
)
run_script.reset_mock()
self.assertEqual(Note.objects.get(agent=self.agent).note, "ok")
def test_get_agent_history(self):
# setup data
agent = baker.make_recipe("agents.agent")
history = baker.make("agents.AgentHistory", agent=agent, _quantity=30)
url = f"/agents/history/{agent.id}/"
# test agent not found
r = self.client.get("/agents/history/500/", format="json")
self.assertEqual(r.status_code, 404)
# test pulling data
r = self.client.get(url, format="json")
data = AgentHistorySerializer(history, many=True).data
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, data) # type:ignore
class TestAgentViewsNew(TacticalTestCase):
@@ -1048,3 +1180,25 @@ class TestAgentTasks(TacticalTestCase):
r = auto_self_agent_update_task.s().apply()
self.assertEqual(agent_update.call_count, 33)
def test_agent_history_prune_task(self):
from .tasks import prune_agent_history
# setup data
agent = baker.make_recipe("agents.agent")
history = baker.make(
"agents.AgentHistory",
agent=agent,
_quantity=50,
)
days = 0
for item in history: # type: ignore
item.time = djangotime.now() - djangotime.timedelta(days=days)
item.save()
days = days + 5
# delete AgentHistory older than 30 days
prune_agent_history(30)
self.assertEqual(AgentHistory.objects.filter(agent=agent).count(), 6)

@@ -29,4 +29,5 @@ urlpatterns = [
path("bulk/", views.bulk),
path("maintenance/", views.agent_maintenance),
path("<int:pk>/wmi/", views.WMI.as_view()),
path("history/<int:pk>/", views.AgentHistoryView.as_view()),
]
@@ -8,7 +8,6 @@ import time
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from loguru import logger
from packaging import version as pyver
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes

@@ -17,14 +16,14 @@ from rest_framework.response import Response
from rest_framework.views import APIView
from core.models import CoreSettings
from logs.models import AuditLog, PendingAction
from logs.models import AuditLog, DebugLog, PendingAction
from scripts.models import Script
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
from tacticalrmm.utils import get_default_timezone, notify_error, reload_nats
from winupdate.serializers import WinUpdatePolicySerializer
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
from .models import Agent, AgentCustomField, Note, RecoveryAction
from .models import Agent, AgentCustomField, Note, RecoveryAction, AgentHistory
from .permissions import (
EditAgentPerms,
EvtLogPerms,

@@ -42,6 +41,7 @@ from .permissions import (
from .serializers import (
AgentCustomFieldSerializer,
AgentEditSerializer,
AgentHistorySerializer,
AgentHostnameSerializer,
AgentOverdueActionSerializer,
AgentSerializer,

@@ -51,8 +51,6 @@ from .serializers import (
)
from .tasks import run_script_email_results_task, send_agent_update_task
logger.configure(**settings.LOG_CONFIG)
@api_view()
def get_agent_versions(request):

@@ -115,7 +113,7 @@ def uninstall(request):
def edit_agent(request):
agent = get_object_or_404(Agent, pk=request.data["id"])
a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True)
a_serializer = AgentEditSerializer(instance=agent, data=request.data, partial=True)
a_serializer.is_valid(raise_exception=True)
a_serializer.save()
@@ -160,17 +158,21 @@ def meshcentral(request, pk):
core = CoreSettings.objects.first()
token = agent.get_login_token(
key=core.mesh_token, user=f"user//{core.mesh_username}"
key=core.mesh_token, user=f"user//{core.mesh_username}" # type:ignore
)
if token == "err":
return notify_error("Invalid mesh token")
control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31"
control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31" # type:ignore
terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31" # type:ignore
file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31" # type:ignore
AuditLog.audit_mesh_session(username=request.user.username, hostname=agent.hostname)
AuditLog.audit_mesh_session(
username=request.user.username,
agent=agent,
debug_info={"ip": request._client_ip},
)
ret = {
"hostname": agent.hostname,

@@ -248,6 +250,16 @@ def send_raw_cmd(request):
"shell": request.data["shell"],
},
}
if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
hist = AgentHistory.objects.create(
agent=agent,
type="cmd_run",
command=request.data["cmd"],
username=request.user.username[:50],
)
data["id"] = hist.pk
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
if r == "timeout":

@@ -255,9 +267,10 @@ def send_raw_cmd(request):
AuditLog.audit_raw_command(
username=request.user.username,
hostname=agent.hostname,
agent=agent,
cmd=request.data["cmd"],
shell=request.data["shell"],
debug_info={"ip": request._client_ip},
)
return Response(r)
@@ -508,7 +521,7 @@ def install_agent(request):
try:
os.remove(ps1)
except Exception as e:
logger.error(str(e))
DebugLog.error(message=str(e))
with open(ps1, "w") as f:
f.write(text)

@@ -566,26 +579,41 @@ def recover(request):
@permission_classes([IsAuthenticated, RunScriptPerms])
def run_script(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
script = get_object_or_404(Script, pk=request.data["scriptPK"])
script = get_object_or_404(Script, pk=request.data["script"])
output = request.data["output"]
args = request.data["args"]
req_timeout = int(request.data["timeout"]) + 3
AuditLog.audit_script_run(
username=request.user.username,
hostname=agent.hostname,
agent=agent,
script=script.name,
debug_info={"ip": request._client_ip},
)
history_pk = 0
if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
hist = AgentHistory.objects.create(
agent=agent,
type="script_run",
script=script,
username=request.user.username[:50],
)
history_pk = hist.pk
if output == "wait":
r = agent.run_script(
scriptpk=script.pk, args=args, timeout=req_timeout, wait=True
scriptpk=script.pk,
args=args,
timeout=req_timeout,
wait=True,
history_pk=history_pk,
)
return Response(r)
elif output == "email":
emails = (
[] if request.data["emailmode"] == "default" else request.data["emails"]
[] if request.data["emailMode"] == "default" else request.data["emails"]
)
run_script_email_results_task.delay(
agentpk=agent.pk,
@@ -594,8 +622,47 @@ def run_script(request):
emails=emails,
args=args,
)
elif output == "collector":
from core.models import CustomField
r = agent.run_script(
scriptpk=script.pk,
args=args,
timeout=req_timeout,
wait=True,
history_pk=history_pk,
)
custom_field = CustomField.objects.get(pk=request.data["custom_field"])
if custom_field.model == "agent":
field = custom_field.get_or_create_field_value(agent)
elif custom_field.model == "client":
field = custom_field.get_or_create_field_value(agent.client)
elif custom_field.model == "site":
field = custom_field.get_or_create_field_value(agent.site)
else:
return notify_error("Custom Field was invalid")
value = r if request.data["save_all_output"] else r.split("\n")[-1].strip()
field.save_to_field(value)
return Response(r)
elif output == "note":
r = agent.run_script(
scriptpk=script.pk,
args=args,
timeout=req_timeout,
wait=True,
history_pk=history_pk,
)
Note.objects.create(agent=agent, user=request.user, note=r)
return Response(r)
else:
agent.run_script(scriptpk=script.pk, args=args, timeout=req_timeout)
agent.run_script(
scriptpk=script.pk, args=args, timeout=req_timeout, history_pk=history_pk
)
return Response(f"{script.name} will now be run on {agent.hostname}")

@@ -668,7 +735,7 @@ class GetEditDeleteNote(APIView):
@api_view(["POST"])
@permission_classes([IsAuthenticated, RunBulkPerms])
def bulk(request):
if request.data["target"] == "agents" and not request.data["agentPKs"]:
if request.data["target"] == "agents" and not request.data["agents"]:
return notify_error("Must select at least 1 agent")
if request.data["target"] == "client":

@@ -676,7 +743,7 @@ def bulk(request):
elif request.data["target"] == "site":
q = Agent.objects.filter(site_id=request.data["site"])
elif request.data["target"] == "agents":
q = Agent.objects.filter(pk__in=request.data["agentPKs"])
q = Agent.objects.filter(pk__in=request.data["agents"])
elif request.data["target"] == "all":
q = Agent.objects.only("pk", "monitoring_type")
else:
@@ -689,29 +756,48 @@ def bulk(request):
agents: list[int] = [agent.pk for agent in q]
AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
if not agents:
return notify_error("No agents where found meeting the selected criteria")
AuditLog.audit_bulk_action(
request.user,
request.data["mode"],
request.data,
debug_info={"ip": request._client_ip},
)
if request.data["mode"] == "command":
handle_bulk_command_task.delay(
agents, request.data["cmd"], request.data["shell"], request.data["timeout"]
agents,
request.data["cmd"],
request.data["shell"],
request.data["timeout"],
request.user.username[:50],
run_on_offline=request.data["offlineAgents"],
)
return Response(f"Command will now be run on {len(agents)} agents")
elif request.data["mode"] == "script":
script = get_object_or_404(Script, pk=request.data["scriptPK"])
script = get_object_or_404(Script, pk=request.data["script"])
handle_bulk_script_task.delay(
script.pk, agents, request.data["args"], request.data["timeout"]
script.pk,
agents,
request.data["args"],
request.data["timeout"],
request.user.username[:50],
)
return Response(f"{script.name} will now be run on {len(agents)} agents")
elif request.data["mode"] == "install":
bulk_install_updates_task.delay(agents)
return Response(
f"Pending updates will now be installed on {len(agents)} agents"
)
elif request.data["mode"] == "scan":
bulk_check_for_updates_task.delay(agents)
return Response(f"Patch status scan will now run on {len(agents)} agents")
elif request.data["mode"] == "patch":
if request.data["patchMode"] == "install":
bulk_install_updates_task.delay(agents)
return Response(
f"Pending updates will now be installed on {len(agents)} agents"
)
elif request.data["patchMode"] == "scan":
bulk_check_for_updates_task.delay(agents)
return Response(f"Patch status scan will now run on {len(agents)} agents")
return notify_error("Something went wrong")
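To illustrate the renamed payload keys in bulk() above (agentPKs → agents, scriptPK → script), a request body for bulk script mode would now look roughly like this; the values are placeholders, only the key names come from the view.

```python
# Hypothetical example payload for a POST to the bulk endpoint after this
# change; field values are made up for illustration.
payload = {
    "target": "agents",
    "agents": [1, 2, 3],   # was "agentPKs"
    "mode": "script",
    "script": 7,           # was "scriptPK"
    "args": ["-Verbose"],
    "timeout": 60,
}
```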
@@ -746,3 +832,11 @@ class WMI(APIView):
if r != "ok":
return notify_error("Unable to contact the agent")
return Response("ok")
class AgentHistoryView(APIView):
def get(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
history = AgentHistory.objects.filter(agent=agent)
return Response(AgentHistorySerializer(history, many=True).data)
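A quick usage note for the new AgentHistoryView above, which pairs with the "history/<int:pk>/" route added in agents/urls.py earlier in this compare; a sketch using DRF's test client, with the URL prefix and token as assumptions.

```python
# Sketch only: exercising the new AgentHistoryView. The "/agents/" prefix and
# the knox token value are assumptions, not shown in this compare.
from rest_framework.test import APIClient

client = APIClient()
client.credentials(HTTP_AUTHORIZATION="Token <knox-token>")
resp = client.get("/agents/history/1/", format="json")
print(resp.status_code, resp.json()[:1])  # 200 and serialized AgentHistory rows
```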
api/tacticalrmm/alerts/migrations/0007_auto_20210721_0423.py (new file)
@@ -0,0 +1,33 @@
# Generated by Django 3.2.1 on 2021-07-21 04:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('alerts', '0006_auto_20210217_1736'),
]
operations = [
migrations.AddField(
model_name='alerttemplate',
name='created_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='alerttemplate',
name='created_time',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='alerttemplate',
name='modified_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='alerttemplate',
name='modified_time',
field=models.DateTimeField(auto_now=True, null=True),
),
]
api/tacticalrmm/alerts/migrations/0008_auto_20210721_1757.py (new file)
@@ -0,0 +1,28 @@
# Generated by Django 3.2.1 on 2021-07-21 17:57
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('alerts', '0007_auto_20210721_0423'),
]
operations = [
migrations.AddField(
model_name='alerttemplate',
name='agent_script_actions',
field=models.BooleanField(blank=True, default=None, null=True),
),
migrations.AddField(
model_name='alerttemplate',
name='check_script_actions',
field=models.BooleanField(blank=True, default=None, null=True),
),
migrations.AddField(
model_name='alerttemplate',
name='task_script_actions',
field=models.BooleanField(blank=True, default=None, null=True),
),
]
28
api/tacticalrmm/alerts/migrations/0009_auto_20210721_1810.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 18:10
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0008_auto_20210721_1757'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_script_actions',
|
||||
field=models.BooleanField(blank=True, default=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='check_script_actions',
|
||||
field=models.BooleanField(blank=True, default=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='task_script_actions',
|
||||
field=models.BooleanField(blank=True, default=True, null=True),
|
||||
),
|
||||
]
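Migrations 0007 through 0009 give AlertTemplate audit columns (created_by, created_time, modified_by, modified_time) and three per-type switches: agent_script_actions, check_script_actions and task_script_actions. The switches are added as nullable with default None in 0008 and then altered to default True in 0009; changing a field's default does not backfill existing rows, so templates created before this change would hold NULL here unless a data migration elsewhere in the branch fills them in. The model hunks that follow thread the switch into Alert handling as run_script_action, so a failure or resolved action only fires when the template allows it for that object type. A minimal sketch of the failure-path gate, condensed from the hunks below:

    # Per-type switch read from the template (agent_/check_/task_script_actions)
    run_script_action = alert_template.agent_script_actions

    if (
        alert_template
        and alert_template.action
        and run_script_action          # new gate added by this changeset
        and not alert.action_run
    ):
        r = agent.run_script(
            scriptpk=alert_template.action.pk,
            args=alert.parse_script_args(alert_template.action_args),
        )  # remaining kwargs (timeout, history) omitted in this sketch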
@@ -3,19 +3,18 @@ from __future__ import annotations
|
||||
import re
|
||||
from typing import TYPE_CHECKING, Union
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.db.models.fields import BooleanField, PositiveIntegerField
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from agents.models import Agent
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
SEVERITY_CHOICES = [
|
||||
("info", "Informational"),
|
||||
@@ -173,6 +172,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.agent_always_email
|
||||
always_text = alert_template.agent_always_text
|
||||
alert_interval = alert_template.agent_periodic_alert_days
|
||||
run_script_action = alert_template.agent_script_actions
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_availability_alert(instance)
|
||||
@@ -209,6 +209,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.check_always_email
|
||||
always_text = alert_template.check_always_text
|
||||
alert_interval = alert_template.check_periodic_alert_days
|
||||
run_script_action = alert_template.check_script_actions
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_check_alert(instance)
|
||||
@@ -242,6 +243,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.task_always_email
|
||||
always_text = alert_template.task_always_text
|
||||
alert_interval = alert_template.task_periodic_alert_days
|
||||
run_script_action = alert_template.task_script_actions
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_task_alert(instance)
|
||||
@@ -295,7 +297,7 @@ class Alert(models.Model):
|
||||
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
|
||||
|
||||
# check if any scripts should be run
|
||||
if alert_template and alert_template.action and not alert.action_run:
|
||||
if alert_template and alert_template.action and run_script_action and not alert.action_run: # type: ignore
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.action.pk,
|
||||
args=alert.parse_script_args(alert_template.action_args),
|
||||
@@ -314,8 +316,10 @@ class Alert(models.Model):
|
||||
alert.action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname} failure alert"
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="scripting",
|
||||
message=f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) failure alert",
|
||||
)
|
||||
|
||||
@classmethod
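Throughout this changeset the module-level loguru calls are swapped for rows in the DebugLog model, so agent diagnostics become queryable by agent and by log type instead of living only in a log file. The call shape below is taken from the hunks above, assuming DebugLog.error/warning/info are thin classmethods that create a row (their implementation is not part of this excerpt):

    from logs.models import DebugLog

    # Agent-scoped entry, as used for failed alert actions above
    DebugLog.error(
        agent=agent,              # agents.models.Agent instance, optional
        log_type="scripting",     # agent_update, agent_issues, win_updates,
                                  # system_issues or scripting per migration 0013
        message=f"Failure action failed to run for {agent.hostname}",
    )

    # Entry with no agent or log_type, as in CoreSettings.send_mail later in the diff
    DebugLog.error(message="Sending email failed with error: ...")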
@@ -345,6 +349,7 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.agent_email_on_resolved
|
||||
text_on_resolved = alert_template.agent_text_on_resolved
|
||||
run_script_action = alert_template.agent_script_actions
|
||||
|
||||
elif isinstance(instance, Check):
|
||||
from checks.tasks import (
|
||||
@@ -363,6 +368,7 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.check_email_on_resolved
|
||||
text_on_resolved = alert_template.check_text_on_resolved
|
||||
run_script_action = alert_template.check_script_actions
|
||||
|
||||
elif isinstance(instance, AutomatedTask):
|
||||
from autotasks.tasks import (
|
||||
@@ -381,6 +387,7 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.task_email_on_resolved
|
||||
text_on_resolved = alert_template.task_text_on_resolved
|
||||
run_script_action = alert_template.task_script_actions
|
||||
|
||||
else:
|
||||
return
|
||||
@@ -403,6 +410,7 @@ class Alert(models.Model):
|
||||
if (
|
||||
alert_template
|
||||
and alert_template.resolved_action
|
||||
and run_script_action # type: ignore
|
||||
and not alert.resolved_action_run
|
||||
):
|
||||
r = agent.run_script(
|
||||
@@ -425,8 +433,10 @@ class Alert(models.Model):
|
||||
alert.resolved_action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname} resolved alert"
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="scripting",
|
||||
message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
|
||||
)
|
||||
|
||||
def parse_script_args(self, args: list[str]):
|
||||
@@ -451,7 +461,7 @@ class Alert(models.Model):
|
||||
try:
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg)) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
DebugLog.error(log_type="scripting", message=e)
|
||||
continue
|
||||
|
||||
else:
|
||||
@@ -460,7 +470,7 @@ class Alert(models.Model):
|
||||
return temp_args
|
||||
|
||||
|
||||
class AlertTemplate(models.Model):
|
||||
class AlertTemplate(BaseAuditModel):
|
||||
name = models.CharField(max_length=100)
|
||||
is_active = models.BooleanField(default=True)
|
||||
|
||||
@@ -517,6 +527,7 @@ class AlertTemplate(models.Model):
|
||||
agent_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
agent_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
agent_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
|
||||
# check alert settings
|
||||
check_email_alert_severity = ArrayField(
|
||||
@@ -540,6 +551,7 @@ class AlertTemplate(models.Model):
|
||||
check_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
check_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
check_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
|
||||
# task alert settings
|
||||
task_email_alert_severity = ArrayField(
|
||||
@@ -563,6 +575,7 @@ class AlertTemplate(models.Model):
|
||||
task_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
task_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
task_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
|
||||
# exclusion settings
|
||||
exclude_workstations = BooleanField(null=True, blank=True, default=False)
|
||||
@@ -581,6 +594,13 @@ class AlertTemplate(models.Model):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@staticmethod
|
||||
def serialize(alert_template):
|
||||
# serializes the agent and returns json
|
||||
from .serializers import AlertTemplateAuditSerializer
|
||||
|
||||
return AlertTemplateAuditSerializer(alert_template).data
|
||||
|
||||
@property
|
||||
def has_agent_settings(self) -> bool:
|
||||
return (
|
||||
|
||||
@@ -119,3 +119,9 @@ class AlertTemplateRelationSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = AlertTemplate
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class AlertTemplateAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = AlertTemplate
|
||||
fields = "__all__"
@@ -1,11 +1,10 @@
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from alerts.models import Alert
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
|
||||
@app.task
|
||||
def unsnooze_alerts() -> str:
|
||||
from .models import Alert
|
||||
|
||||
Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update(
|
||||
snoozed=False, snooze_until=None
|
||||
@@ -22,3 +21,14 @@ def cache_agents_alert_template():
|
||||
agent.set_alert_template()
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def prune_resolved_alerts(older_than_days: int) -> str:
|
||||
from .models import Alert
|
||||
|
||||
Alert.objects.filter(resolved=True).filter(
|
||||
alert_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
|
||||
).delete()
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
from datetime import datetime, timedelta
|
||||
from unittest.mock import patch
|
||||
|
||||
from core.models import CoreSettings
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker, seq
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from autotasks.models import AutomatedTask
|
||||
from core.models import CoreSettings
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import Alert, AlertTemplate
|
||||
from .serializers import (
|
||||
@@ -330,8 +329,8 @@ class TestAlertsViews(TacticalTestCase):
|
||||
baker.make("clients.Site", alert_template=alert_template, _quantity=3)
|
||||
baker.make("automation.Policy", alert_template=alert_template)
|
||||
core = CoreSettings.objects.first()
|
||||
core.alert_template = alert_template
|
||||
core.save()
|
||||
core.alert_template = alert_template # type: ignore
|
||||
core.save() # type: ignore
|
||||
|
||||
url = f"/alerts/alerttemplates/{alert_template.pk}/related/" # type: ignore
|
||||
|
||||
@@ -403,16 +402,16 @@ class TestAlertTasks(TacticalTestCase):
|
||||
# assign first Alert Template as to a policy and apply it as default
|
||||
policy.alert_template = alert_templates[0] # type: ignore
|
||||
policy.save() # type: ignore
|
||||
core.workstation_policy = policy
|
||||
core.server_policy = policy
|
||||
core.save()
|
||||
core.workstation_policy = policy # type: ignore
|
||||
core.server_policy = policy # type: ignore
|
||||
core.save() # type: ignore
|
||||
|
||||
self.assertEquals(server.set_alert_template().pk, alert_templates[0].pk) # type: ignore
|
||||
self.assertEquals(workstation.set_alert_template().pk, alert_templates[0].pk) # type: ignore
|
||||
|
||||
# assign second Alert Template to as default alert template
|
||||
core.alert_template = alert_templates[1] # type: ignore
|
||||
core.save()
|
||||
core.save() # type: ignore
|
||||
|
||||
self.assertEquals(workstation.set_alert_template().pk, alert_templates[1].pk) # type: ignore
|
||||
self.assertEquals(server.set_alert_template().pk, alert_templates[1].pk) # type: ignore
|
||||
@@ -514,6 +513,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
agent_recovery_email_task,
|
||||
agent_recovery_sms_task,
|
||||
)
|
||||
|
||||
from alerts.models import Alert
|
||||
|
||||
agent_dashboard_alert = baker.make_recipe("agents.overdue_agent")
|
||||
@@ -727,7 +727,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
send_email,
|
||||
sleep,
|
||||
):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from checks.models import Check
|
||||
from checks.tasks import (
|
||||
handle_check_email_alert_task,
|
||||
@@ -736,6 +735,8 @@ class TestAlertTasks(TacticalTestCase):
|
||||
handle_resolved_check_sms_alert_task,
|
||||
)
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
|
||||
# create test data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent_no_settings = baker.make_recipe("agents.agent")
|
||||
@@ -1011,7 +1012,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
send_email,
|
||||
sleep,
|
||||
):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.tasks import (
|
||||
handle_resolved_task_email_alert,
|
||||
@@ -1020,6 +1020,8 @@ class TestAlertTasks(TacticalTestCase):
|
||||
handle_task_sms_alert,
|
||||
)
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
|
||||
# create test data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent_no_settings = baker.make_recipe("agents.agent")
|
||||
@@ -1272,17 +1274,17 @@ class TestAlertTasks(TacticalTestCase):
|
||||
)
|
||||
|
||||
core = CoreSettings.objects.first()
|
||||
core.smtp_host = "test.test.com"
|
||||
core.smtp_port = 587
|
||||
core.smtp_recipients = ["recipient@test.com"]
|
||||
core.twilio_account_sid = "test"
|
||||
core.twilio_auth_token = "1234123412341234"
|
||||
core.sms_alert_recipients = ["+1234567890"]
|
||||
core.smtp_host = "test.test.com" # type: ignore
|
||||
core.smtp_port = 587 # type: ignore
|
||||
core.smtp_recipients = ["recipient@test.com"] # type: ignore
|
||||
core.twilio_account_sid = "test" # type: ignore
|
||||
core.twilio_auth_token = "1234123412341234" # type: ignore
|
||||
core.sms_alert_recipients = ["+1234567890"] # type: ignore
|
||||
|
||||
# test sending email with alert template settings
|
||||
core.send_mail("Test", "Test", alert_template=alert_template)
|
||||
core.send_mail("Test", "Test", alert_template=alert_template) # type: ignore
|
||||
|
||||
core.send_sms("Test", alert_template=alert_template)
|
||||
core.send_sms("Test", alert_template=alert_template) # type: ignore
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
@patch("agents.tasks.agent_outage_sms_task.delay")
|
||||
@@ -1315,6 +1317,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
"alerts.AlertTemplate",
|
||||
is_active=True,
|
||||
agent_always_alert=True,
|
||||
agent_script_actions=False,
|
||||
action=failure_action,
|
||||
action_timeout=30,
|
||||
resolved_action=resolved_action,
|
||||
@@ -1328,6 +1331,14 @@ class TestAlertTasks(TacticalTestCase):
|
||||
|
||||
agent_outages_task()
|
||||
|
||||
# should not have been called since agent_script_actions is set to False
|
||||
nats_cmd.assert_not_called()
|
||||
|
||||
alert_template.agent_script_actions = True # type: ignore
|
||||
alert_template.save() # type: ignore
|
||||
|
||||
agent_outages_task()
|
||||
|
||||
# this is what data should be
|
||||
data = {
|
||||
"func": "runscriptfull",
|
||||
@@ -1340,14 +1351,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
# Setup cmd mock
|
||||
success = {
|
||||
"retcode": 0,
|
||||
"stdout": "success!",
|
||||
"stderr": "",
|
||||
"execution_time": 5.0000,
|
||||
}
|
||||
|
||||
nats_cmd.side_effect = ["pong", success]
|
||||
|
||||
# make sure script run results were stored
|
||||
@@ -1398,3 +1401,36 @@ class TestAlertTasks(TacticalTestCase):
|
||||
["-Parameter", f"-Another '{alert.id}'"], # type: ignore
|
||||
alert.parse_script_args(args=args), # type: ignore
|
||||
)
|
||||
|
||||
def test_prune_resolved_alerts(self):
|
||||
from .tasks import prune_resolved_alerts
|
||||
|
||||
# setup data
|
||||
resolved_alerts = baker.make(
|
||||
"alerts.Alert",
|
||||
resolved=True,
|
||||
_quantity=25,
|
||||
)
|
||||
|
||||
alerts = baker.make(
|
||||
"alerts.Alert",
|
||||
resolved=False,
|
||||
_quantity=25,
|
||||
)
|
||||
|
||||
days = 0
|
||||
for alert in resolved_alerts: # type: ignore
|
||||
alert.alert_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
alert.save()
|
||||
days = days + 5
|
||||
|
||||
days = 0
|
||||
for alert in alerts: # type: ignore
|
||||
alert.alert_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
alert.save()
|
||||
days = days + 5
|
||||
|
||||
# delete AgentHistory older than 30 days
|
||||
prune_resolved_alerts(30)
|
||||
|
||||
self.assertEqual(Alert.objects.count(), 31)
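The expected 31 follows from the setup: the 25 unresolved alerts are never candidates (the task filters on resolved=True), and of the 25 resolved alerts aged 0, 5, ..., 120 days only those younger than the 30 day cutoff survive; the 30 day old one is created a moment before the cutoff is computed, so it falls just under alert_time__lt and is pruned as well. The inline comment about AgentHistory looks copied from the similar prune test; this one exercises prune_resolved_alerts. A quick check of the arithmetic:

    resolved_ages = range(0, 125, 5)                      # 25 resolved alerts
    survivors = sum(1 for d in resolved_ages if d < 30)   # ages 0..25 -> 6 survive
    assert 25 + survivors == 31                           # plus the 25 unresolved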
@@ -20,4 +20,5 @@ urlpatterns = [
|
||||
path("superseded/", views.SupersededWinUpdate.as_view()),
|
||||
path("<int:pk>/chocoresult/", views.ChocoResult.as_view()),
|
||||
path("<str:agentid>/recovery/", views.AgentRecovery.as_view()),
|
||||
path("<int:pk>/<str:agentid>/histresult/", views.AgentHistoryResult.as_view()),
|
||||
]
|
||||
|
||||
@@ -6,7 +6,6 @@ from django.conf import settings
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.authtoken.models import Token
|
||||
@@ -15,20 +14,18 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.models import User
|
||||
from agents.models import Agent, AgentCustomField
|
||||
from agents.serializers import WinAgentSerializer
|
||||
from agents.models import Agent, AgentHistory
|
||||
from agents.serializers import WinAgentSerializer, AgentHistorySerializer
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
|
||||
from checks.models import Check
|
||||
from checks.serializers import CheckRunnerGetSerializer
|
||||
from checks.utils import bytes2human
|
||||
from logs.models import PendingAction
|
||||
from logs.models import PendingAction, DebugLog
|
||||
from software.models import InstalledSoftware
|
||||
from tacticalrmm.utils import SoftwareList, filter_software, notify_error, reload_nats
|
||||
from winupdate.models import WinUpdate, WinUpdatePolicy
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class CheckIn(APIView):
|
||||
|
||||
@@ -36,6 +33,10 @@ class CheckIn(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def patch(self, request):
|
||||
"""
|
||||
!!! DEPRECATED AS OF AGENT 1.6.0 !!!
|
||||
Endpoint be removed in a future release
|
||||
"""
|
||||
from alerts.models import Alert
|
||||
|
||||
updated = False
|
||||
@@ -182,7 +183,11 @@ class WinUpdates(APIView):
|
||||
|
||||
if reboot:
|
||||
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
|
||||
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="windows_updates",
|
||||
message=f"{agent.hostname} is rebooting after updates were installed.",
|
||||
)
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
@@ -350,7 +355,7 @@ class TaskRunner(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
_ = get_object_or_404(Agent, agent_id=agentid)
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
return Response(TaskGOGetSerializer(task).data)
|
||||
|
||||
@@ -371,38 +376,7 @@ class TaskRunner(APIView):
|
||||
if task.custom_field:
|
||||
if not task.stderr:
|
||||
|
||||
if AgentCustomField.objects.filter(
|
||||
field=task.custom_field, agent=task.agent
|
||||
).exists():
|
||||
agent_field = AgentCustomField.objects.get(
|
||||
field=task.custom_field, agent=task.agent
|
||||
)
|
||||
else:
|
||||
agent_field = AgentCustomField.objects.create(
|
||||
field=task.custom_field, agent=task.agent
|
||||
)
|
||||
|
||||
# get last line of stdout
|
||||
value = (
|
||||
new_task.stdout
|
||||
if task.collector_all_output
|
||||
else new_task.stdout.split("\n")[-1].strip()
|
||||
)
|
||||
|
||||
if task.custom_field.type in [
|
||||
"text",
|
||||
"number",
|
||||
"single",
|
||||
"datetime",
|
||||
]:
|
||||
agent_field.string_value = value
|
||||
agent_field.save()
|
||||
elif task.custom_field.type == "multiple":
|
||||
agent_field.multiple_value = value.split(",")
|
||||
agent_field.save()
|
||||
elif task.custom_field.type == "checkbox":
|
||||
agent_field.bool_value = bool(value)
|
||||
agent_field.save()
|
||||
task.save_collector_results()
|
||||
|
||||
status = "passing"
|
||||
else:
|
||||
@@ -419,15 +393,6 @@ class TaskRunner(APIView):
|
||||
else:
|
||||
Alert.handle_alert_failure(new_task)
|
||||
|
||||
AuditLog.objects.create(
|
||||
username=agent.hostname,
|
||||
agent=agent.hostname,
|
||||
object_type="agent",
|
||||
action="task_run",
|
||||
message=f"Scheduled Task {task.name} was run on {agent.hostname}",
|
||||
after_value=AutomatedTask.serialize(new_task),
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -518,6 +483,7 @@ class NewAgent(APIView):
|
||||
action="agent_install",
|
||||
message=f"{request.user} installed new agent {agent.hostname}",
|
||||
after_value=Agent.serialize(agent),
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
return Response(
|
||||
@@ -622,3 +588,16 @@ class AgentRecovery(APIView):
|
||||
reload_nats()
|
||||
|
||||
return Response(ret)
|
||||
|
||||
|
||||
class AgentHistoryResult(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def patch(self, request, agentid, pk):
|
||||
_ = get_object_or_404(Agent, agent_id=agentid)
|
||||
hist = get_object_or_404(AgentHistory, pk=pk)
|
||||
s = AgentHistorySerializer(instance=hist, data=request.data, partial=True)
|
||||
s.is_valid(raise_exception=True)
|
||||
s.save()
|
||||
return Response("ok")
@@ -33,7 +33,7 @@ class Policy(BaseAuditModel):
|
||||
|
||||
# get old policy if exists
|
||||
old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
super(Policy, self).save(old_model=old_policy, *args, **kwargs)
|
||||
|
||||
# generate agent checks only if active and enforced were changed
|
||||
if old_policy:
|
||||
@@ -50,7 +50,7 @@ class Policy(BaseAuditModel):
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
agents = list(self.related_agents().only("pk").values_list("pk", flat=True))
|
||||
super(BaseAuditModel, self).delete(*args, **kwargs)
|
||||
super(Policy, self).delete(*args, **kwargs)
|
||||
|
||||
generate_agent_checks_task.delay(agents=agents, create_tasks=True)
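The save() and delete() overrides on Policy (and on Client and Site later in this diff) stop calling super(BaseAuditModel, self), which starts the method lookup after BaseAuditModel and therefore skips whatever that base class does in save(), and call super(<Model>, self) instead, passing the previous state as old_model. The BaseAuditModel.save signature is not shown here, so the keyword is taken on faith from these hunks. The resulting pattern, condensed:

    class Policy(BaseAuditModel):
        def save(self, *args, **kwargs):
            # capture the old row so BaseAuditModel.save can diff it (assumed behaviour)
            old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
            super(Policy, self).save(old_model=old_policy, *args, **kwargs)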
@@ -126,9 +126,9 @@ class Policy(BaseAuditModel):
|
||||
@staticmethod
|
||||
def serialize(policy):
|
||||
# serializes the policy and returns json
|
||||
from .serializers import PolicySerializer
|
||||
from .serializers import PolicyAuditSerializer
|
||||
|
||||
return PolicySerializer(policy).data
|
||||
return PolicyAuditSerializer(policy).data
|
||||
|
||||
@staticmethod
|
||||
def cascade_policy_tasks(agent):
|
||||
|
||||
@@ -89,3 +89,9 @@ class AutoTasksFieldSerializer(ModelSerializer):
|
||||
model = AutomatedTask
|
||||
fields = "__all__"
|
||||
depth = 1
|
||||
|
||||
|
||||
class PolicyAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Policy
|
||||
fields = "__all__"
|
||||
|
||||
@@ -6,19 +6,15 @@ from typing import List
|
||||
|
||||
import pytz
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.db.models.fields import DateTimeField
|
||||
from django.db.utils import DatabaseError
|
||||
from django.utils import timezone as djangotime
|
||||
from logs.models import BaseAuditModel
|
||||
from loguru import logger
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
from packaging import version as pyver
|
||||
from tacticalrmm.utils import bitdays_to_string
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
RUN_TIME_DAY_CHOICES = [
|
||||
(0, "Monday"),
|
||||
(1, "Tuesday"),
|
||||
@@ -195,9 +191,9 @@ class AutomatedTask(BaseAuditModel):
|
||||
@staticmethod
|
||||
def serialize(task):
|
||||
# serializes the task and returns json
|
||||
from .serializers import TaskSerializer
|
||||
from .serializers import TaskAuditSerializer
|
||||
|
||||
return TaskSerializer(task).data
|
||||
return TaskAuditSerializer(task).data
|
||||
|
||||
def create_policy_task(self, agent=None, policy=None, assigned_check=None):
|
||||
|
||||
@@ -254,7 +250,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
|
||||
elif self.task_type == "runonce":
|
||||
# check if scheduled time is in the past
|
||||
agent_tz = pytz.timezone(agent.timezone)
|
||||
agent_tz = pytz.timezone(agent.timezone) # type: ignore
|
||||
task_time_utc = self.run_time_date.replace(tzinfo=agent_tz).astimezone(
|
||||
pytz.utc
|
||||
)
|
||||
@@ -280,7 +276,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
},
|
||||
}
|
||||
|
||||
if self.run_asap_after_missed and pyver.parse(agent.version) >= pyver.parse(
|
||||
if self.run_asap_after_missed and pyver.parse(agent.version) >= pyver.parse( # type: ignore
|
||||
"1.4.7"
|
||||
):
|
||||
nats_data["schedtaskpayload"]["run_asap_after_missed"] = True
|
||||
@@ -301,19 +297,25 @@ class AutomatedTask(BaseAuditModel):
|
||||
else:
|
||||
return "error"
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5)) # type: ignore
|
||||
|
||||
if r != "ok":
|
||||
self.sync_status = "initial"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in."
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in.", # type: ignore
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.sync_status = "synced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.info(f"{agent.hostname} task {self.name} was successfully created")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} task {self.name} was successfully created", # type: ignore
|
||||
)
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -333,19 +335,25 @@ class AutomatedTask(BaseAuditModel):
|
||||
"enabled": self.enabled,
|
||||
},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5)) # type: ignore
|
||||
|
||||
if r != "ok":
|
||||
self.sync_status = "notsynced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"Unable to modify scheduled task {self.name} on {agent.hostname}. It will try again on next agent checkin"
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to modify scheduled task {self.name} on {agent.hostname}({agent.pk}). It will try again on next agent checkin", # type: ignore
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.sync_status = "synced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.info(f"{agent.hostname} task {self.name} was successfully modified")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} task {self.name} was successfully modified", # type: ignore
|
||||
)
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -362,7 +370,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": self.win_task_name},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10)) # type: ignore
|
||||
|
||||
if r != "ok" and "The system cannot find the file specified" not in r:
|
||||
self.sync_status = "pendingdeletion"
|
||||
@@ -372,13 +380,19 @@ class AutomatedTask(BaseAuditModel):
|
||||
except DatabaseError:
|
||||
pass
|
||||
|
||||
logger.warning(
|
||||
f"{agent.hostname} task {self.name} will be deleted on next checkin"
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} task {self.name} will be deleted on next checkin", # type: ignore
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.delete()
|
||||
logger.info(f"{agent.hostname} task {self.name} was deleted")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname}({agent.pk}) task {self.name} was deleted", # type: ignore
|
||||
)
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -391,9 +405,20 @@ class AutomatedTask(BaseAuditModel):
|
||||
.first()
|
||||
)
|
||||
|
||||
asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False))
|
||||
asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False)) # type: ignore
|
||||
return "ok"
|
||||
|
||||
def save_collector_results(self):
|
||||
|
||||
agent_field = self.custom_field.get_or_create_field_value(self.agent)
|
||||
|
||||
value = (
|
||||
self.stdout
|
||||
if self.collector_all_output
|
||||
else self.stdout.split("\n")[-1].strip()
|
||||
)
|
||||
agent_field.save_to_field(value)
|
||||
|
||||
def should_create_alert(self, alert_template=None):
|
||||
return (
|
||||
self.dashboard_alert
|
||||
@@ -424,7 +449,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_mail(subject, body, self.agent.alert_template)
|
||||
CORE.send_mail(subject, body, self.agent.alert_template) # type: ignore
|
||||
|
||||
def send_sms(self):
|
||||
from core.models import CoreSettings
|
||||
@@ -441,7 +466,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template)
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template) # type: ignore
|
||||
|
||||
def send_resolved_email(self):
|
||||
from core.models import CoreSettings
|
||||
@@ -453,7 +478,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_mail(subject, body, alert_template=self.agent.alert_template)
|
||||
CORE.send_mail(subject, body, alert_template=self.agent.alert_template) # type: ignore
|
||||
|
||||
def send_resolved_sms(self):
|
||||
from core.models import CoreSettings
|
||||
@@ -464,4 +489,4 @@ class AutomatedTask(BaseAuditModel):
|
||||
subject
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template)
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template) # type: ignore
|
||||
|
||||
@@ -84,3 +84,9 @@ class TaskRunnerPatchSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class TaskAuditSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
fields = "__all__"
|
||||
|
||||
@@ -1,18 +1,16 @@
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
from logging import log
|
||||
import random
|
||||
from time import sleep
|
||||
from typing import Union
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
|
||||
from autotasks.models import AutomatedTask
|
||||
from logs.models import DebugLog
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@app.task
|
||||
def create_win_task_schedule(pk):
|
||||
@@ -53,12 +51,20 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
|
||||
agent = Agent.objects.get(pk=agentpk)
|
||||
|
||||
logger.info(f"Orphaned task cleanup initiated on {agent.hostname}.")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Orphaned task cleanup initiated on {agent.hostname}.",
|
||||
)
|
||||
|
||||
r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10))
|
||||
|
||||
if not isinstance(r, list) and not r: # empty list
|
||||
logger.error(f"Unable to clean up scheduled tasks on {agent.hostname}: {r}")
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to clean up scheduled tasks on {agent.hostname}: {r}",
|
||||
)
|
||||
return "notlist"
|
||||
|
||||
agent_task_names = list(agent.autotasks.values_list("win_task_name", flat=True))
|
||||
@@ -83,13 +89,23 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
}
|
||||
ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
if ret != "ok":
|
||||
logger.error(
|
||||
f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}"
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}",
|
||||
)
|
||||
else:
|
||||
logger.info(f"Removed orphaned task {task} from {agent.hostname}")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Removed orphaned task {task} from {agent.hostname}",
|
||||
)
|
||||
|
||||
logger.info(f"Orphaned task cleanup finished on {agent.hostname}")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Orphaned task cleanup finished on {agent.hostname}",
|
||||
)
|
||||
|
||||
|
||||
@app.task
|
||||
|
||||
@@ -12,10 +12,6 @@ from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
from django.db import models
|
||||
from logs.models import BaseAuditModel
|
||||
from loguru import logger
|
||||
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
CHECK_TYPE_CHOICES = [
|
||||
("diskspace", "Disk Space Check"),
|
||||
@@ -475,9 +471,9 @@ class Check(BaseAuditModel):
|
||||
@staticmethod
|
||||
def serialize(check):
|
||||
# serializes the check and returns json
|
||||
from .serializers import CheckSerializer
|
||||
from .serializers import CheckAuditSerializer
|
||||
|
||||
return CheckSerializer(check).data
|
||||
return CheckAuditSerializer(check).data
|
||||
|
||||
# for policy diskchecks
|
||||
@staticmethod
|
||||
|
||||
@@ -220,3 +220,9 @@ class CheckHistorySerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = CheckHistory
|
||||
fields = ("x", "y", "results")
|
||||
|
||||
|
||||
class CheckAuditSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Check
|
||||
fields = "__all__"
|
||||
|
||||
@@ -33,13 +33,17 @@ class Client(BaseAuditModel):
|
||||
blank=True,
|
||||
)
|
||||
|
||||
def save(self, *args, **kw):
|
||||
def save(self, *args, **kwargs):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# get old client if exists
|
||||
old_client = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kw)
|
||||
old_client = Client.objects.get(pk=self.pk) if self.pk else None
|
||||
super(Client, self).save(
|
||||
old_model=old_client,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
# check if polcies have changed and initiate task to reapply policies if so
|
||||
if old_client:
|
||||
@@ -50,7 +54,6 @@ class Client(BaseAuditModel):
|
||||
old_client.block_policy_inheritance != self.block_policy_inheritance
|
||||
)
|
||||
):
|
||||
|
||||
generate_agent_checks_task.delay(
|
||||
client=self.pk,
|
||||
create_tasks=True,
|
||||
@@ -120,10 +123,10 @@ class Client(BaseAuditModel):
|
||||
|
||||
@staticmethod
|
||||
def serialize(client):
|
||||
# serializes the client and returns json
|
||||
from .serializers import ClientSerializer
|
||||
from .serializers import ClientAuditSerializer
|
||||
|
||||
return ClientSerializer(client).data
|
||||
# serializes the client and returns json
|
||||
return ClientAuditSerializer(client).data
|
||||
|
||||
|
||||
class Site(BaseAuditModel):
|
||||
@@ -153,13 +156,17 @@ class Site(BaseAuditModel):
|
||||
blank=True,
|
||||
)
|
||||
|
||||
def save(self, *args, **kw):
|
||||
def save(self, *args, **kwargs):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# get old client if exists
|
||||
old_site = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(Site, self).save(*args, **kw)
|
||||
old_site = Site.objects.get(pk=self.pk) if self.pk else None
|
||||
super(Site, self).save(
|
||||
old_model=old_site,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
# check if polcies have changed and initiate task to reapply policies if so
|
||||
if old_site:
|
||||
@@ -168,11 +175,10 @@ class Site(BaseAuditModel):
|
||||
or (old_site.workstation_policy != self.workstation_policy)
|
||||
or (old_site.block_policy_inheritance != self.block_policy_inheritance)
|
||||
):
|
||||
|
||||
generate_agent_checks_task.delay(site=self.pk, create_tasks=True)
|
||||
|
||||
if old_site.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
if old_site.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
|
||||
class Meta:
|
||||
ordering = ("name",)
|
||||
@@ -233,10 +239,10 @@ class Site(BaseAuditModel):
|
||||
|
||||
@staticmethod
|
||||
def serialize(site):
|
||||
# serializes the site and returns json
|
||||
from .serializers import SiteSerializer
|
||||
from .serializers import SiteAuditSerializer
|
||||
|
||||
return SiteSerializer(site).data
|
||||
# serializes the site and returns json
|
||||
return SiteAuditSerializer(site).data
|
||||
|
||||
|
||||
MON_TYPE_CHOICES = [
|
||||
@@ -308,6 +314,22 @@ class ClientCustomField(models.Model):
|
||||
else:
|
||||
return self.string_value
|
||||
|
||||
def save_to_field(self, value):
|
||||
if self.field.type in [
|
||||
"text",
|
||||
"number",
|
||||
"single",
|
||||
"datetime",
|
||||
]:
|
||||
self.string_value = value
|
||||
self.save()
|
||||
elif type == "multiple":
|
||||
self.multiple_value = value.split(",")
|
||||
self.save()
|
||||
elif type == "checkbox":
|
||||
self.bool_value = bool(value)
|
||||
self.save()
|
||||
|
||||
|
||||
class SiteCustomField(models.Model):
|
||||
site = models.ForeignKey(
|
||||
@@ -342,3 +364,19 @@ class SiteCustomField(models.Model):
|
||||
return self.bool_value
|
||||
else:
|
||||
return self.string_value
|
||||
|
||||
def save_to_field(self, value):
|
||||
if self.field.type in [
|
||||
"text",
|
||||
"number",
|
||||
"single",
|
||||
"datetime",
|
||||
]:
|
||||
self.string_value = value
|
||||
self.save()
|
||||
elif type == "multiple":
|
||||
self.multiple_value = value.split(",")
|
||||
self.save()
|
||||
elif type == "checkbox":
|
||||
self.bool_value = bool(value)
|
||||
self.save()
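Both save_to_field helpers above (ClientCustomField and SiteCustomField) check self.field.type in the first branch but then compare the built-in type against a string in the elif branches, so the multiple and checkbox paths can never be taken as written. What the logic appears to intend, mirroring the first branch and the equivalent dispatch that TaskRunner used before it was factored out earlier in this diff:

    def save_to_field(self, value):
        if self.field.type in ["text", "number", "single", "datetime"]:
            self.string_value = value
        elif self.field.type == "multiple":
            self.multiple_value = value.split(",")
        elif self.field.type == "checkbox":
            self.bool_value = bool(value)
        self.save()   # saved once at the end for brevity in this sketch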
@@ -1,4 +1,10 @@
|
||||
from rest_framework.serializers import ModelSerializer, ReadOnlyField, ValidationError
|
||||
from django.db.models.base import Model
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
ReadOnlyField,
|
||||
Serializer,
|
||||
ValidationError,
|
||||
)
|
||||
|
||||
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
|
||||
|
||||
@@ -134,3 +140,15 @@ class DeploymentSerializer(ModelSerializer):
|
||||
"install_flags",
|
||||
"created",
|
||||
]
|
||||
|
||||
|
||||
class SiteAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Site
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class ClientAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Client
|
||||
fields = "__all__"
|
||||
|
||||
@@ -3,10 +3,8 @@ import re
|
||||
import uuid
|
||||
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
@@ -26,8 +24,6 @@ from .serializers import (
|
||||
SiteSerializer,
|
||||
)
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class GetAddClients(APIView):
|
||||
permission_classes = [IsAuthenticated, ManageClientsPerms]
|
||||
|
||||
api/tacticalrmm/core/migrations/0024_auto_20210707_1828.py (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-07 18:28
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0023_coresettings_clear_faults_days'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='agent_history_prune_days',
|
||||
field=models.PositiveIntegerField(default=30),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='resolved_alerts_prune_days',
|
||||
field=models.PositiveIntegerField(default=0),
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/core/migrations/0025_auto_20210707_1835.py (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-07 18:35
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0024_auto_20210707_1828'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='agent_debug_level',
|
||||
field=models.CharField(choices=[('info', 'Info'), ('warning', 'Warning'), ('error', 'Error'), ('critical', 'Critical')], default='info', max_length=20),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='debug_log_prune_days',
|
||||
field=models.PositiveIntegerField(default=30),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='coresettings',
|
||||
name='agent_history_prune_days',
|
||||
field=models.PositiveIntegerField(default=60),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 17:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0025_auto_20210707_1835'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='audit_log_prune_days',
|
||||
field=models.PositiveIntegerField(default=0),
|
||||
),
|
||||
]
|
||||
@@ -1,17 +1,15 @@
|
||||
import smtplib
|
||||
from email.message import EmailMessage
|
||||
from django.db.models.enums import Choices
|
||||
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import models
|
||||
from loguru import logger
|
||||
from twilio.rest import Client as TwClient
|
||||
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from logs.models import BaseAuditModel, DebugLog, LOG_LEVEL_CHOICES
|
||||
|
||||
TZ_CHOICES = [(_, _) for _ in pytz.all_timezones]
|
||||
|
||||
@@ -51,6 +49,13 @@ class CoreSettings(BaseAuditModel):
|
||||
)
|
||||
# removes check history older than days
|
||||
check_history_prune_days = models.PositiveIntegerField(default=30)
|
||||
resolved_alerts_prune_days = models.PositiveIntegerField(default=0)
|
||||
agent_history_prune_days = models.PositiveIntegerField(default=60)
|
||||
debug_log_prune_days = models.PositiveIntegerField(default=30)
|
||||
audit_log_prune_days = models.PositiveIntegerField(default=0)
|
||||
agent_debug_level = models.CharField(
|
||||
max_length=20, choices=LOG_LEVEL_CHOICES, default="info"
|
||||
)
|
||||
clear_faults_days = models.IntegerField(default=0)
|
||||
mesh_token = models.CharField(max_length=255, null=True, blank=True, default="")
|
||||
mesh_username = models.CharField(max_length=255, null=True, blank=True, default="")
|
||||
@@ -184,14 +189,14 @@ class CoreSettings(BaseAuditModel):
|
||||
server.quit()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Sending email failed with error: {e}")
|
||||
DebugLog.error(message=f"Sending email failed with error: {e}")
|
||||
if test:
|
||||
return str(e)
|
||||
else:
|
||||
return True
|
||||
|
||||
def send_sms(self, body, alert_template=None):
|
||||
if not alert_template and not self.sms_is_configured:
|
||||
if not alert_template or not self.sms_is_configured:
|
||||
return
|
||||
|
||||
# override email recipients if alert_template is passed and is set
|
||||
@@ -205,7 +210,7 @@ class CoreSettings(BaseAuditModel):
|
||||
try:
|
||||
tw_client.messages.create(body=body, to=num, from_=self.twilio_number)
|
||||
except Exception as e:
|
||||
logger.error(f"SMS failed to send: {e}")
|
||||
DebugLog.error(message=f"SMS failed to send: {e}")
|
||||
|
||||
@staticmethod
|
||||
def serialize(core):
|
||||
@@ -265,6 +270,26 @@ class CustomField(models.Model):
|
||||
else:
|
||||
return self.default_value_string
|
||||
|
||||
def get_or_create_field_value(self, instance):
|
||||
from agents.models import Agent, AgentCustomField
|
||||
from clients.models import Client, ClientCustomField, Site, SiteCustomField
|
||||
|
||||
if isinstance(instance, Agent):
|
||||
if AgentCustomField.objects.filter(field=self, agent=instance).exists():
|
||||
return AgentCustomField.objects.get(field=self, agent=instance)
|
||||
else:
|
||||
return AgentCustomField.objects.create(field=self, agent=instance)
|
||||
elif isinstance(instance, Client):
|
||||
if ClientCustomField.objects.filter(field=self, client=instance).exists():
|
||||
return ClientCustomField.objects.get(field=self, client=instance)
|
||||
else:
|
||||
return ClientCustomField.objects.create(field=self, client=instance)
|
||||
elif isinstance(instance, Site):
|
||||
if SiteCustomField.objects.filter(field=self, site=instance).exists():
|
||||
return SiteCustomField.objects.get(field=self, site=instance)
|
||||
else:
|
||||
return SiteCustomField.objects.create(field=self, site=instance)
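get_or_create_field_value centralizes the filter/get-or-create sequence that TaskRunner previously inlined for agent custom fields, and extends it to clients and sites. Its first caller in this changeset is AutomatedTask.save_collector_results, shown earlier; the usage boils down to:

    # From AutomatedTask.save_collector_results earlier in this diff
    agent_field = task.custom_field.get_or_create_field_value(task.agent)
    value = (
        task.stdout
        if task.collector_all_output
        else task.stdout.split("\n")[-1].strip()   # keep only the last line of stdout
    )
    agent_field.save_to_field(value)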
|
||||
|
||||
|
||||
class CodeSignToken(models.Model):
|
||||
token = models.CharField(max_length=255, null=True, blank=True)
|
||||
@@ -287,6 +312,9 @@ class GlobalKVStore(models.Model):
|
||||
return self.name
|
||||
|
||||
|
||||
OPEN_ACTIONS = (("window", "New Window"), ("tab", "New Tab"))
|
||||
|
||||
|
||||
class URLAction(models.Model):
|
||||
name = models.CharField(max_length=25)
|
||||
desc = models.CharField(max_length=100, null=True, blank=True)
|
||||
|
||||
@@ -1,17 +1,15 @@
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
from checks.tasks import prune_check_history
|
||||
from agents.tasks import clear_faults_task
|
||||
from agents.tasks import clear_faults_task, prune_agent_history
|
||||
from alerts.tasks import prune_resolved_alerts
|
||||
from core.models import CoreSettings
|
||||
from logs.tasks import prune_debug_log, prune_audit_log
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@app.task
|
||||
def core_maintenance_tasks():
|
||||
@@ -32,11 +30,28 @@ def core_maintenance_tasks():
|
||||
core = CoreSettings.objects.first()
|
||||
|
||||
# remove old CheckHistory data
|
||||
if core.check_history_prune_days > 0:
|
||||
prune_check_history.delay(core.check_history_prune_days)
|
||||
if core.check_history_prune_days > 0: # type: ignore
|
||||
prune_check_history.delay(core.check_history_prune_days) # type: ignore
|
||||
|
||||
# remove old resolved alerts
|
||||
if core.resolved_alerts_prune_days > 0: # type: ignore
|
||||
prune_resolved_alerts.delay(core.resolved_alerts_prune_days) # type: ignore
|
||||
|
||||
# remove old agent history
|
||||
if core.agent_history_prune_days > 0: # type: ignore
|
||||
prune_agent_history.delay(core.agent_history_prune_days) # type: ignore
|
||||
|
||||
# remove old debug logs
|
||||
if core.debug_log_prune_days > 0: # type: ignore
|
||||
prune_debug_log.delay(core.debug_log_prune_days) # type: ignore
|
||||
|
||||
# remove old audit logs
|
||||
if core.audit_log_prune_days > 0: # type: ignore
|
||||
prune_audit_log.delay(core.audit_log_prune_days) # type: ignore
|
||||
|
||||
# clear faults
|
||||
if core.clear_faults_days > 0:
|
||||
clear_faults_task.delay(core.clear_faults_days)
|
||||
if core.clear_faults_days > 0: # type: ignore
|
||||
clear_faults_task.delay(core.clear_faults_days) # type: ignore
|
||||
|
||||
|
||||
@app.task
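core_maintenance_tasks now reads five retention settings off CoreSettings and only queues a prune task when the value is positive, so 0 disables that particular cleanup. After these changes the retention fields and defaults are: check_history_prune_days 30 (pre-existing), resolved_alerts_prune_days 0, agent_history_prune_days 60, debug_log_prune_days 30, audit_log_prune_days 0, with agent_debug_level defaulting to "info". An illustrative way to tune them from a Django shell:

    from core.models import CoreSettings

    core = CoreSettings.objects.first()
    core.resolved_alerts_prune_days = 14   # start pruning resolved alerts after two weeks
    core.audit_log_prune_days = 90         # keep 90 days of audit history
    core.save()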
@@ -346,9 +346,18 @@ class RunURLAction(APIView):
|
||||
from requests.utils import requote_uri
|
||||
|
||||
from agents.models import Agent
|
||||
from clients.models import Client, Site
|
||||
from tacticalrmm.utils import replace_db_values
|
||||
|
||||
agent = get_object_or_404(Agent, pk=request.data["agent"])
|
||||
if "agent" in request.data.keys():
|
||||
instance = get_object_or_404(Agent, pk=request.data["agent"])
|
||||
elif "site" in request.data.keys():
|
||||
instance = get_object_or_404(Site, pk=request.data["site"])
|
||||
elif "client" in request.data.keys():
|
||||
instance = get_object_or_404(Client, pk=request.data["client"])
|
||||
else:
|
||||
return notify_error("received an incorrect request")
|
||||
|
||||
action = get_object_or_404(URLAction, pk=request.data["action"])
|
||||
|
||||
pattern = re.compile("\\{\\{([\\w\\s]+\\.[\\w\\s]+)\\}\\}")
|
||||
@@ -356,7 +365,7 @@ class RunURLAction(APIView):
|
||||
url_pattern = action.pattern
|
||||
|
||||
for string in re.findall(pattern, action.pattern):
|
||||
value = replace_db_values(string=string, agent=agent, quotes=False)
|
||||
value = replace_db_values(string=string, instance=instance, quotes=False)
|
||||
|
||||
url_pattern = re.sub("\\{\\{" + string + "\\}\\}", str(value), url_pattern)
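RunURLAction previously always resolved an agent; it now accepts whichever of agent, site or client is present in the request body and hands that instance to replace_db_values, so URL patterns built from {{client.*}} or {{site.*}} placeholders work without picking an agent. The accepted request shapes, with keys exactly as the view reads them and pk values purely illustrative:

    # one of agent / site / client, plus the URLAction pk ("action" is always required)
    payload_agent  = {"agent": 12, "action": 3}
    payload_site   = {"site": 4, "action": 3}
    payload_client = {"client": 1, "action": 3}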
@@ -1,6 +1,7 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import AuditLog, PendingAction
|
||||
from .models import AuditLog, PendingAction, DebugLog
|
||||
|
||||
admin.site.register(PendingAction)
|
||||
admin.site.register(AuditLog)
|
||||
admin.site.register(DebugLog)
|
||||
|
||||
api/tacticalrmm/logs/migrations/0013_auto_20210614_1835.py (new file, 68 lines)
@@ -0,0 +1,68 @@
|
||||
# Generated by Django 3.2.1 on 2021-06-14 18:35
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("logs", "0012_auto_20210228_0943"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="agent",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="debuglogs",
|
||||
to="agents.agent",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="entry_time",
|
||||
field=models.DateTimeField(
|
||||
auto_now_add=True, default=django.utils.timezone.now
|
||||
),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="log_level",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("info", "Info"),
|
||||
("warning", "Warning"),
|
||||
("error", "Error"),
|
||||
("critical", "Critical"),
|
||||
],
|
||||
default="info",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="log_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("agent_update", "Agent Update"),
|
||||
("agent_issues", "Agent Issues"),
|
||||
("win_updates", "Windows Updates"),
|
||||
("system_issues", "System Issues"),
|
||||
("scripting", "Scripting"),
|
||||
],
|
||||
default="system_issues",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="message",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
]
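Migration 0013 adds the columns behind the DebugLog usage that runs through this whole changeset (the model itself already existed; these are AddField operations). The sketch below reconstructs the resulting shape from the migration alone; LOG_LEVEL_CHOICES is a real name in logs.models (core/models.py imports it above), while the second choice list and anything beyond these fields, such as the error/warning/info helpers, is not shown in this excerpt. Note also that the WinUpdates view earlier passes log_type="windows_updates" while the choices here spell it "win_updates"; Django does not validate choices on create(), so the value would be stored as-is unless reconciled elsewhere in the branch.

    from django.db import models

    LOG_LEVEL_CHOICES = [
        ("info", "Info"), ("warning", "Warning"),
        ("error", "Error"), ("critical", "Critical"),
    ]
    LOG_TYPE_CHOICES = [
        ("agent_update", "Agent Update"), ("agent_issues", "Agent Issues"),
        ("win_updates", "Windows Updates"), ("system_issues", "System Issues"),
        ("scripting", "Scripting"),
    ]

    class DebugLog(models.Model):          # sketch of the fields added in 0013
        agent = models.ForeignKey(
            "agents.Agent", null=True, blank=True,
            related_name="debuglogs", on_delete=models.CASCADE,
        )
        entry_time = models.DateTimeField(auto_now_add=True)
        log_level = models.CharField(max_length=50, choices=LOG_LEVEL_CHOICES, default="info")
        log_type = models.CharField(max_length=50, choices=LOG_TYPE_CHOICES, default="system_issues")
        message = models.TextField(null=True, blank=True)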
api/tacticalrmm/logs/migrations/0014_auditlog_agent_id.py (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-06-28 02:37
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0013_auto_20210614_1835'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='auditlog',
|
||||
name='agent_id',
|
||||
field=models.PositiveIntegerField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 04:26
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0014_auditlog_agent_id'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='auditlog',
|
||||
name='object_type',
|
||||
field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk'), ('alert_template', 'Alert Template'), ('role', 'Role')], max_length=100),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 17:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0015_alter_auditlog_object_type'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='auditlog',
|
||||
name='object_type',
|
||||
field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk'), ('alerttemplate', 'Alert Template'), ('role', 'Role')], max_length=100),
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/logs/migrations/0017_auto_20210731_1707.py (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-31 17:07
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0016_alter_auditlog_object_type'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='pendingaction',
|
||||
name='cancelable',
|
||||
field=models.BooleanField(blank=True, default=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='pendingaction',
|
||||
name='action_type',
|
||||
field=models.CharField(blank=True, choices=[('schedreboot', 'Scheduled Reboot'), ('taskaction', 'Scheduled Task Action'), ('agentupdate', 'Agent Update'), ('chocoinstall', 'Chocolatey Software Install'), ('runcmd', 'Run Command'), ('runscript', 'Run Script'), ('runpatchscan', 'Run Patch Scan'), ('runpatchinstall', 'Run Patch Install')], max_length=255, null=True),
|
||||
),
|
||||
]
|
||||
@@ -2,14 +2,24 @@ import datetime as dt
|
||||
from abc import abstractmethod
|
||||
|
||||
from django.db import models
|
||||
|
||||
from tacticalrmm.middleware import get_debug_info, get_username
|
||||
|
||||
|
||||
def get_debug_level():
|
||||
from core.models import CoreSettings
|
||||
|
||||
return CoreSettings.objects.first().agent_debug_level # type: ignore
|
||||
|
||||
|
||||
ACTION_TYPE_CHOICES = [
|
||||
("schedreboot", "Scheduled Reboot"),
|
||||
("taskaction", "Scheduled Task Action"), # deprecated
|
||||
("agentupdate", "Agent Update"),
|
||||
("chocoinstall", "Chocolatey Software Install"),
|
||||
("runcmd", "Run Command"),
|
||||
("runscript", "Run Script"),
|
||||
("runpatchscan", "Run Patch Scan"),
|
||||
("runpatchinstall", "Run Patch Install"),
|
||||
]
|
||||
|
||||
AUDIT_ACTION_TYPE_CHOICES = [
|
||||
@@ -40,6 +50,8 @@ AUDIT_OBJECT_TYPE_CHOICES = [
|
||||
("automatedtask", "Automated Task"),
|
||||
("coresettings", "Core Settings"),
|
||||
("bulk", "Bulk"),
|
||||
("alerttemplate", "Alert Template"),
|
||||
("role", "Role"),
|
||||
]
|
||||
|
||||
STATUS_CHOICES = [
|
||||
@@ -51,6 +63,7 @@ STATUS_CHOICES = [
|
||||
class AuditLog(models.Model):
|
||||
username = models.CharField(max_length=100)
|
||||
agent = models.CharField(max_length=255, null=True, blank=True)
|
||||
agent_id = models.PositiveIntegerField(blank=True, null=True)
|
||||
entry_time = models.DateTimeField(auto_now_add=True)
|
||||
action = models.CharField(max_length=100, choices=AUDIT_ACTION_TYPE_CHOICES)
|
||||
object_type = models.CharField(max_length=100, choices=AUDIT_OBJECT_TYPE_CHOICES)
|
||||
@@ -73,24 +86,25 @@ class AuditLog(models.Model):
|
||||
return super(AuditLog, self).save(*args, **kwargs)
|
||||
|
||||
@staticmethod
|
||||
def audit_mesh_session(username, hostname, debug_info={}):
|
||||
def audit_mesh_session(username, agent, debug_info={}):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
agent=hostname,
|
||||
agent=agent.hostname,
|
||||
agent_id=agent.id,
|
||||
object_type="agent",
|
||||
action="remote_session",
|
||||
message=f"{username} used Mesh Central to initiate a remote session to {hostname}.",
|
||||
message=f"{username} used Mesh Central to initiate a remote session to {agent.hostname}.",
|
||||
debug_info=debug_info,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def audit_raw_command(username, hostname, cmd, shell, debug_info={}):
|
||||
def audit_raw_command(username, agent, cmd, shell, debug_info={}):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
agent=hostname,
|
||||
agent=agent.hostname,
|
||||
object_type="agent",
|
||||
action="execute_command",
|
||||
message=f"{username} issued {shell} command on {hostname}.",
|
||||
message=f"{username} issued {shell} command on {agent.hostname}.",
|
||||
after_value=cmd,
|
||||
debug_info=debug_info,
|
||||
)
|
||||
@@ -102,6 +116,7 @@ class AuditLog(models.Model):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
object_type=object_type,
|
||||
agent_id=before["id"] if object_type == "agent" else None,
|
||||
action="modify",
|
||||
message=f"{username} modified {object_type} {name}",
|
||||
before_value=before,
|
||||
@@ -114,6 +129,7 @@ class AuditLog(models.Model):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
object_type=object_type,
|
||||
agent=after["id"] if object_type == "agent" else None,
|
||||
action="add",
|
||||
message=f"{username} added {object_type} {name}",
|
||||
after_value=after,
|
||||
@@ -125,6 +141,7 @@ class AuditLog(models.Model):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
object_type=object_type,
|
||||
agent=before["id"] if object_type == "agent" else None,
|
||||
action="delete",
|
||||
message=f"{username} deleted {object_type} {name}",
|
||||
before_value=before,
|
||||
@@ -132,13 +149,14 @@ class AuditLog(models.Model):
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def audit_script_run(username, hostname, script, debug_info={}):
|
||||
def audit_script_run(username, agent, script, debug_info={}):
|
||||
AuditLog.objects.create(
|
||||
agent=hostname,
|
||||
agent=agent.hostname,
|
||||
agent_id=agent.id,
|
||||
username=username,
|
||||
object_type="agent",
|
||||
action="execute_script",
|
||||
message=f'{username} ran script: "{script}" on {hostname}',
|
||||
message=f'{username} ran script: "{script}" on {agent.hostname}',
|
||||
debug_info=debug_info,
|
||||
)
|
||||
|
||||
@@ -190,13 +208,13 @@ class AuditLog(models.Model):
|
||||
site = Site.objects.get(pk=affected["site"])
|
||||
target = f"on all agents within site: {site.client.name}\\{site.name}"
|
||||
elif affected["target"] == "agents":
|
||||
agents = Agent.objects.filter(pk__in=affected["agentPKs"]).values_list(
|
||||
agents = Agent.objects.filter(pk__in=affected["agents"]).values_list(
|
||||
"hostname", flat=True
|
||||
)
|
||||
target = "on multiple agents"
|
||||
|
||||
if action == "script":
|
||||
script = Script.objects.get(pk=affected["scriptPK"])
|
||||
script = Script.objects.get(pk=affected["script"])
|
||||
action = f"script: {script.name}"
|
||||
|
||||
if agents:
|
||||
@@ -212,8 +230,63 @@
        )


LOG_LEVEL_CHOICES = [
    ("info", "Info"),
    ("warning", "Warning"),
    ("error", "Error"),
    ("critical", "Critical"),
]

LOG_TYPE_CHOICES = [
    ("agent_update", "Agent Update"),
    ("agent_issues", "Agent Issues"),
    ("win_updates", "Windows Updates"),
    ("system_issues", "System Issues"),
    ("scripting", "Scripting"),
]


class DebugLog(models.Model):
    pass
    entry_time = models.DateTimeField(auto_now_add=True)
    agent = models.ForeignKey(
        "agents.Agent",
        related_name="debuglogs",
        on_delete=models.CASCADE,
        null=True,
        blank=True,
    )
    log_level = models.CharField(
        max_length=50, choices=LOG_LEVEL_CHOICES, default="info"
    )
    log_type = models.CharField(
        max_length=50, choices=LOG_TYPE_CHOICES, default="system_issues"
    )
    message = models.TextField(null=True, blank=True)

    @classmethod
    def info(
        cls,
        message,
        agent=None,
        log_type="system_issues",
    ):
        if get_debug_level() in ["info"]:
            cls(log_level="info", agent=agent, log_type=log_type, message=message)

    @classmethod
    def warning(cls, message, agent=None, log_type="system_issues"):
        if get_debug_level() in ["info", "warning"]:
            cls(log_level="warning", agent=agent, log_type=log_type, message=message)

    @classmethod
    def error(cls, message, agent=None, log_type="system_issues"):
        if get_debug_level() in ["info", "warning", "error"]:
            cls(log_level="error", agent=agent, log_type=log_type, message=message)

    @classmethod
    def critical(cls, message, agent=None, log_type="system_issues"):
        if get_debug_level() in ["info", "warning", "error", "critical"]:
            cls(log_level="critical", agent=agent, log_type=log_type, message=message)


class PendingAction(models.Model):
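Note: the DebugLog helpers above are meant to be called from server-side code in place of file-based logging; a minimal usage sketch follows (the agent lookup and the messages are illustrative, not taken from these commits):

from agents.models import Agent
from logs.models import DebugLog

agent = Agent.objects.first()  # hypothetical agent, for illustration only

# Each helper consults CoreSettings.agent_debug_level before recording anything.
DebugLog.info("Agent checked in", agent=agent, log_type="agent_issues")
DebugLog.error("Scheduled task sync failed", agent=agent, log_type="agent_issues")

# As written in this hunk the classmethods only construct the instance with
# cls(...); persisting the entry would still need an explicit .save() (or a
# follow-up commit adding one).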
@@ -232,6 +305,7 @@ class PendingAction(models.Model):
|
||||
choices=STATUS_CHOICES,
|
||||
default="pending",
|
||||
)
|
||||
cancelable = models.BooleanField(blank=True, default=False)
|
||||
celery_id = models.CharField(null=True, blank=True, max_length=255)
|
||||
details = models.JSONField(null=True, blank=True)
|
||||
|
||||
@@ -247,6 +321,8 @@ class PendingAction(models.Model):
|
||||
return "Next update cycle"
|
||||
elif self.action_type == "chocoinstall":
|
||||
return "ASAP"
|
||||
else:
|
||||
return "On next checkin"
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
@@ -259,6 +335,14 @@ class PendingAction(models.Model):
|
||||
elif self.action_type == "chocoinstall":
|
||||
return f"{self.details['name']} software install"
|
||||
|
||||
elif self.action_type in [
|
||||
"runcmd",
|
||||
"runscript",
|
||||
"runpatchscan",
|
||||
"runpatchinstall",
|
||||
]:
|
||||
return f"{self.action_type}"
|
||||
|
||||
|
||||
class BaseAuditModel(models.Model):
|
||||
# abstract base class for auditing models
|
||||
@@ -275,13 +359,14 @@ class BaseAuditModel(models.Model):
|
||||
def serialize():
|
||||
pass
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
def save(self, old_model=None, *args, **kwargs):
|
||||
|
||||
if get_username():
|
||||
|
||||
before_value = {}
|
||||
object_class = type(self)
|
||||
object_name = object_class.__name__.lower()
|
||||
username = get_username()
|
||||
after_value = object_class.serialize(self) # type: ignore
|
||||
|
||||
# populate created_by and modified_by fields on instance
|
||||
if not getattr(self, "created_by", None):
|
||||
@@ -289,32 +374,37 @@ class BaseAuditModel(models.Model):
|
||||
if hasattr(self, "modified_by"):
|
||||
self.modified_by = username
|
||||
|
||||
# capture object properties before edit
|
||||
if self.pk:
|
||||
before_value = object_class.objects.get(pk=self.id)
|
||||
|
||||
# dont create entry for agent add since that is done in view
|
||||
if not self.pk:
|
||||
AuditLog.audit_object_add(
|
||||
username,
|
||||
object_name,
|
||||
object_class.serialize(self),
|
||||
after_value, # type: ignore
|
||||
self.__str__(),
|
||||
debug_info=get_debug_info(),
|
||||
)
|
||||
else:
|
||||
AuditLog.audit_object_changed(
|
||||
username,
|
||||
object_class.__name__.lower(),
|
||||
object_class.serialize(before_value),
|
||||
object_class.serialize(self),
|
||||
self.__str__(),
|
||||
debug_info=get_debug_info(),
|
||||
)
|
||||
|
||||
return super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
if old_model:
|
||||
before_value = object_class.serialize(old_model) # type: ignore
|
||||
else:
|
||||
before_value = object_class.serialize(object_class.objects.get(pk=self.pk)) # type: ignore
|
||||
# only create an audit entry if the values have changed
|
||||
if before_value != after_value: # type: ignore
|
||||
|
||||
AuditLog.audit_object_changed(
|
||||
username,
|
||||
object_class.__name__.lower(),
|
||||
before_value,
|
||||
after_value, # type: ignore
|
||||
self.__str__(),
|
||||
debug_info=get_debug_info(),
|
||||
)
|
||||
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
super(BaseAuditModel, self).delete(*args, **kwargs)
|
||||
|
||||
if get_username():
|
||||
|
||||
@@ -322,9 +412,7 @@ class BaseAuditModel(models.Model):
|
||||
AuditLog.audit_object_delete(
|
||||
get_username(),
|
||||
object_class.__name__.lower(),
|
||||
object_class.serialize(self),
|
||||
object_class.serialize(self), # type: ignore
|
||||
self.__str__(),
|
||||
debug_info=get_debug_info(),
|
||||
)
|
||||
|
||||
return super(BaseAuditModel, self).delete(*args, **kwargs)
|
||||
|
||||
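The reworked BaseAuditModel.save() now accepts an optional old_model and only writes an audit entry when the serialized before/after values differ; a sketch of how a caller might pass it (the model, pk, and field values are illustrative assumptions):

from scripts.models import Script

script = Script.objects.get(pk=5)        # hypothetical pk
old = Script.objects.get(pk=script.pk)   # unmodified snapshot for the audit diff
script.description = "new description"
script.save(old_model=old)               # compared against old instead of a fresh pk lookup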
@@ -2,12 +2,12 @@ from rest_framework import serializers
|
||||
|
||||
from tacticalrmm.utils import get_default_timezone
|
||||
|
||||
from .models import AuditLog, PendingAction
|
||||
from .models import AuditLog, DebugLog, PendingAction
|
||||
|
||||
|
||||
class AuditLogSerializer(serializers.ModelSerializer):
|
||||
|
||||
entry_time = serializers.SerializerMethodField(read_only=True)
|
||||
ip_address = serializers.ReadOnlyField(source="debug_info.ip")
|
||||
|
||||
class Meta:
|
||||
model = AuditLog
|
||||
@@ -19,7 +19,6 @@ class AuditLogSerializer(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class PendingActionSerializer(serializers.ModelSerializer):
|
||||
|
||||
hostname = serializers.ReadOnlyField(source="agent.hostname")
|
||||
salt_id = serializers.ReadOnlyField(source="agent.salt_id")
|
||||
client = serializers.ReadOnlyField(source="agent.client.name")
|
||||
@@ -30,3 +29,16 @@ class PendingActionSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = PendingAction
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class DebugLogSerializer(serializers.ModelSerializer):
|
||||
agent = serializers.ReadOnlyField(source="agent.hostname")
|
||||
entry_time = serializers.SerializerMethodField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = DebugLog
|
||||
fields = "__all__"
|
||||
|
||||
def get_entry_time(self, log):
|
||||
timezone = get_default_timezone()
|
||||
return log.entry_time.astimezone(timezone).strftime("%m %d %Y %H:%M:%S")
|
||||
|
||||
api/tacticalrmm/logs/tasks.py (new file, 25 lines)
@@ -0,0 +1,25 @@
from django.utils import timezone as djangotime

from tacticalrmm.celery import app


@app.task
def prune_debug_log(older_than_days: int) -> str:
    from .models import DebugLog

    DebugLog.objects.filter(
        entry_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
    ).delete()

    return "ok"


@app.task
def prune_audit_log(older_than_days: int) -> str:
    from .models import AuditLog

    AuditLog.objects.filter(
        entry_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
    ).delete()

    return "ok"
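These prune tasks only run when something schedules them; a minimal Celery beat entry wiring them up is sketched below (the schedule names, times, and retention day counts are assumptions, not part of these commits):

# settings.py (sketch)
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "prune-debug-log": {
        "task": "logs.tasks.prune_debug_log",
        "schedule": crontab(hour=4, minute=0),   # once a day
        "args": (30,),                           # keep 30 days of DebugLog rows
    },
    "prune-audit-log": {
        "task": "logs.tasks.prune_audit_log",
        "schedule": crontab(hour=4, minute=30),
        "args": (90,),                           # keep 90 days of AuditLog rows
    },
}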
@@ -1,10 +1,11 @@
|
||||
from datetime import datetime, timedelta
|
||||
from itertools import cycle
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker, seq
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from logs.models import PendingAction
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
|
||||
class TestAuditViews(TacticalTestCase):
|
||||
@@ -16,20 +17,23 @@ class TestAuditViews(TacticalTestCase):
|
||||
|
||||
# create clients for client filter
|
||||
site = baker.make("clients.Site")
|
||||
baker.make_recipe("agents.agent", site=site, hostname="AgentHostname1")
|
||||
agent1 = baker.make_recipe("agents.agent", site=site, hostname="AgentHostname1")
|
||||
agent2 = baker.make_recipe("agents.agent", hostname="AgentHostname2")
|
||||
agent0 = baker.make_recipe("agents.agent", hostname="AgentHostname")
|
||||
|
||||
# user jim agent logs
|
||||
baker.make_recipe(
|
||||
"logs.agent_logs",
|
||||
username="jim",
|
||||
agent="AgentHostname1",
|
||||
entry_time=seq(datetime.now(), timedelta(days=3)),
|
||||
agent_id=agent1.id,
|
||||
_quantity=15,
|
||||
)
|
||||
baker.make_recipe(
|
||||
"logs.agent_logs",
|
||||
username="jim",
|
||||
agent="AgentHostname2",
|
||||
entry_time=seq(datetime.now(), timedelta(days=100)),
|
||||
agent_id=agent2.id,
|
||||
_quantity=8,
|
||||
)
|
||||
|
||||
@@ -38,14 +42,14 @@ class TestAuditViews(TacticalTestCase):
|
||||
"logs.agent_logs",
|
||||
username="james",
|
||||
agent="AgentHostname1",
|
||||
entry_time=seq(datetime.now(), timedelta(days=55)),
|
||||
agent_id=agent1.id,
|
||||
_quantity=7,
|
||||
)
|
||||
baker.make_recipe(
|
||||
"logs.agent_logs",
|
||||
username="james",
|
||||
agent="AgentHostname2",
|
||||
entry_time=seq(datetime.now(), timedelta(days=20)),
|
||||
agent_id=agent2.id,
|
||||
_quantity=10,
|
||||
)
|
||||
|
||||
@@ -53,7 +57,7 @@ class TestAuditViews(TacticalTestCase):
|
||||
baker.make_recipe(
|
||||
"logs.agent_logs",
|
||||
agent=seq("AgentHostname"),
|
||||
entry_time=seq(datetime.now(), timedelta(days=29)),
|
||||
agent_id=seq(agent1.id),
|
||||
_quantity=5,
|
||||
)
|
||||
|
||||
@@ -61,7 +65,6 @@ class TestAuditViews(TacticalTestCase):
|
||||
baker.make_recipe(
|
||||
"logs.object_logs",
|
||||
username="james",
|
||||
entry_time=seq(datetime.now(), timedelta(days=5)),
|
||||
_quantity=17,
|
||||
)
|
||||
|
||||
@@ -69,7 +72,6 @@ class TestAuditViews(TacticalTestCase):
|
||||
baker.make_recipe(
|
||||
"logs.login_logs",
|
||||
username="james",
|
||||
entry_time=seq(datetime.now(), timedelta(days=7)),
|
||||
_quantity=11,
|
||||
)
|
||||
|
||||
@@ -77,51 +79,62 @@ class TestAuditViews(TacticalTestCase):
|
||||
baker.make_recipe(
|
||||
"logs.login_logs",
|
||||
username="jim",
|
||||
entry_time=seq(datetime.now(), timedelta(days=11)),
|
||||
_quantity=13,
|
||||
)
|
||||
|
||||
return site
|
||||
return {"site": site, "agents": [agent0, agent1, agent2]}
|
||||
|
||||
def test_get_audit_logs(self):
|
||||
url = "/logs/auditlogs/"
|
||||
|
||||
# create data
|
||||
site = self.create_audit_records()
|
||||
data = self.create_audit_records()
|
||||
|
||||
# test data and result counts
|
||||
data = [
|
||||
{"filter": {"timeFilter": 30}, "count": 86},
|
||||
{
|
||||
"filter": {"timeFilter": 45, "agentFilter": ["AgentHostname2"]},
|
||||
"filter": {
|
||||
"timeFilter": 45,
|
||||
"agentFilter": [data["agents"][2].id],
|
||||
},
|
||||
"count": 19,
|
||||
},
|
||||
{
|
||||
"filter": {"userFilter": ["jim"], "agentFilter": ["AgentHostname1"]},
|
||||
"filter": {
|
||||
"userFilter": ["jim"],
|
||||
"agentFilter": [data["agents"][1].id],
|
||||
},
|
||||
"count": 15,
|
||||
},
|
||||
{
|
||||
"filter": {
|
||||
"timeFilter": 180,
|
||||
"userFilter": ["james"],
|
||||
"agentFilter": ["AgentHostname1"],
|
||||
"agentFilter": [data["agents"][1].id],
|
||||
},
|
||||
"count": 7,
|
||||
},
|
||||
{"filter": {}, "count": 86},
|
||||
{"filter": {"agentFilter": ["DoesntExist"]}, "count": 0},
|
||||
{"filter": {"agentFilter": [500]}, "count": 0},
|
||||
{
|
||||
"filter": {
|
||||
"timeFilter": 35,
|
||||
"userFilter": ["james", "jim"],
|
||||
"agentFilter": ["AgentHostname1", "AgentHostname2"],
|
||||
"agentFilter": [
|
||||
data["agents"][1].id,
|
||||
data["agents"][2].id,
|
||||
],
|
||||
},
|
||||
"count": 40,
|
||||
},
|
||||
{"filter": {"timeFilter": 35, "userFilter": ["james", "jim"]}, "count": 81},
|
||||
{"filter": {"objectFilter": ["user"]}, "count": 26},
|
||||
{"filter": {"actionFilter": ["login"]}, "count": 12},
|
||||
{"filter": {"clientFilter": [site.client.id]}, "count": 23},
|
||||
{
|
||||
"filter": {"clientFilter": [data["site"].client.id]},
|
||||
"count": 23,
|
||||
},
|
||||
]
|
||||
|
||||
pagination = {
|
||||
@@ -137,45 +150,15 @@ class TestAuditViews(TacticalTestCase):
|
||||
)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(
|
||||
len(resp.data["audit_logs"]),
|
||||
len(resp.data["audit_logs"]), # type:ignore
|
||||
pagination["rowsPerPage"]
|
||||
if req["count"] > pagination["rowsPerPage"]
|
||||
else req["count"],
|
||||
)
|
||||
self.assertEqual(resp.data["total"], req["count"])
|
||||
self.assertEqual(resp.data["total"], req["count"]) # type:ignore
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_options_filter(self):
|
||||
url = "/logs/auditlogs/optionsfilter/"
|
||||
|
||||
baker.make_recipe("agents.agent", hostname=seq("AgentHostname"), _quantity=5)
|
||||
baker.make_recipe("agents.agent", hostname=seq("Server"), _quantity=3)
|
||||
baker.make("accounts.User", username=seq("Username"), _quantity=7)
|
||||
baker.make("accounts.User", username=seq("soemthing"), _quantity=3)
|
||||
|
||||
data = [
|
||||
{"req": {"type": "agent", "pattern": "AgeNt"}, "count": 5},
|
||||
{"req": {"type": "agent", "pattern": "AgentHostname1"}, "count": 1},
|
||||
{"req": {"type": "agent", "pattern": "hasjhd"}, "count": 0},
|
||||
{"req": {"type": "user", "pattern": "UsEr"}, "count": 7},
|
||||
{"req": {"type": "user", "pattern": "UserName1"}, "count": 1},
|
||||
{"req": {"type": "user", "pattern": "dfdsadf"}, "count": 0},
|
||||
]
|
||||
|
||||
for req in data:
|
||||
resp = self.client.post(url, req["req"], format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), req["count"])
|
||||
|
||||
# test for invalid payload. needs to have either type: user or agent
|
||||
invalid_data = {"type": "object", "pattern": "SomeString"}
|
||||
|
||||
resp = self.client.post(url, invalid_data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_get_pending_actions(self):
|
||||
url = "/logs/pendingactions/"
|
||||
agent1 = baker.make_recipe("agents.online_agent")
|
||||
@@ -270,3 +253,87 @@ class TestAuditViews(TacticalTestCase):
|
||||
self.assertEqual(r.data, "error deleting sched task") # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_get_debug_log(self):
|
||||
url = "/logs/debuglog/"
|
||||
|
||||
# create data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
baker.make(
|
||||
"logs.DebugLog",
|
||||
log_level=cycle(["error", "info", "warning", "critical"]),
|
||||
log_type="agent_issues",
|
||||
agent=agent,
|
||||
_quantity=4,
|
||||
)
|
||||
|
||||
logs = baker.make(
|
||||
"logs.DebugLog",
|
||||
log_type="system_issues",
|
||||
log_level=cycle(["error", "info", "warning", "critical"]),
|
||||
_quantity=15,
|
||||
)
|
||||
|
||||
# test agent filter
|
||||
data = {"agentFilter": agent.id}
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 4) # type: ignore
|
||||
|
||||
# test log type filter and agent
|
||||
data = {"agentFilter": agent.id, "logLevelFilter": "warning"}
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 1) # type: ignore
|
||||
|
||||
# test time filter with other
|
||||
data = {"logTypeFilter": "system_issues", "logLevelFilter": "error"}
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 4) # type: ignore
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
|
||||
class TestLogTasks(TacticalTestCase):
|
||||
def test_prune_debug_log(self):
|
||||
from .models import DebugLog
|
||||
from .tasks import prune_debug_log
|
||||
|
||||
# setup data
|
||||
debug_log = baker.make(
|
||||
"logs.DebugLog",
|
||||
_quantity=50,
|
||||
)
|
||||
|
||||
days = 0
|
||||
for item in debug_log: # type:ignore
|
||||
item.entry_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
item.save()
|
||||
days = days + 5
|
||||
|
||||
# delete AgentHistory older than 30 days
|
||||
prune_debug_log(30)
|
||||
|
||||
self.assertEqual(DebugLog.objects.count(), 6)
|
||||
|
||||
def test_prune_audit_log(self):
|
||||
from .models import AuditLog
|
||||
from .tasks import prune_audit_log
|
||||
|
||||
# setup data
|
||||
audit_log = baker.make(
|
||||
"logs.AuditLog",
|
||||
_quantity=50,
|
||||
)
|
||||
|
||||
days = 0
|
||||
for item in audit_log: # type:ignore
|
||||
item.entry_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
item.save()
|
||||
days = days + 5
|
||||
|
||||
# delete AgentHistory older than 30 days
|
||||
prune_audit_log(30)
|
||||
|
||||
self.assertEqual(AuditLog.objects.count(), 6)
|
||||
|
||||
@@ -5,7 +5,5 @@ from . import views
urlpatterns = [
    path("pendingactions/", views.PendingActions.as_view()),
    path("auditlogs/", views.GetAuditLogs.as_view()),
    path("auditlogs/optionsfilter/", views.FilterOptionsAuditLog.as_view()),
    path("debuglog/<mode>/<hostname>/<order>/", views.debug_log),
    path("downloadlog/", views.download_log),
    path("debuglog/", views.GetDebugLog.as_view()),
]
@@ -1,28 +1,23 @@
|
||||
import asyncio
|
||||
import subprocess
|
||||
from datetime import datetime as dt
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.paginator import Paginator
|
||||
from django.db.models import Q
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.models import User
|
||||
from accounts.serializers import UserSerializer
|
||||
from agents.models import Agent
|
||||
from agents.serializers import AgentHostnameSerializer
|
||||
from django.core.paginator import Paginator
|
||||
from django.db.models import Q
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework import status
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import AuditLog, PendingAction
|
||||
from .models import AuditLog, PendingAction, DebugLog
|
||||
from .permissions import AuditLogPerms, DebugLogPerms, ManagePendingActionPerms
|
||||
from .serializers import AuditLogSerializer, PendingActionSerializer
|
||||
from .serializers import AuditLogSerializer, DebugLogSerializer, PendingActionSerializer
|
||||
|
||||
|
||||
class GetAuditLogs(APIView):
|
||||
@@ -48,7 +43,7 @@ class GetAuditLogs(APIView):
|
||||
timeFilter = Q()
|
||||
|
||||
if "agentFilter" in request.data:
|
||||
agentFilter = Q(agent__in=request.data["agentFilter"])
|
||||
agentFilter = Q(agent_id__in=request.data["agentFilter"])
|
||||
|
||||
elif "clientFilter" in request.data:
|
||||
clients = Client.objects.filter(
|
||||
@@ -95,25 +90,6 @@ class GetAuditLogs(APIView):
|
||||
)
|
||||
|
||||
|
||||
class FilterOptionsAuditLog(APIView):
|
||||
permission_classes = [IsAuthenticated, AuditLogPerms]
|
||||
|
||||
def post(self, request):
|
||||
if request.data["type"] == "agent":
|
||||
agents = Agent.objects.filter(hostname__icontains=request.data["pattern"])
|
||||
return Response(AgentHostnameSerializer(agents, many=True).data)
|
||||
|
||||
if request.data["type"] == "user":
|
||||
users = User.objects.filter(
|
||||
username__icontains=request.data["pattern"],
|
||||
agent=None,
|
||||
is_installer_user=False,
|
||||
)
|
||||
return Response(UserSerializer(users, many=True).data)
|
||||
|
||||
return Response("error", status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
class PendingActions(APIView):
|
||||
permission_classes = [IsAuthenticated, ManagePendingActionPerms]
|
||||
|
||||
@@ -158,60 +134,28 @@ class PendingActions(APIView):
|
||||
return Response(f"{action.agent.hostname}: {action.description} was cancelled")
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([IsAuthenticated, DebugLogPerms])
|
||||
def debug_log(request, mode, hostname, order):
|
||||
log_file = settings.LOG_CONFIG["handlers"][0]["sink"]
|
||||
class GetDebugLog(APIView):
|
||||
permission_classes = [IsAuthenticated, DebugLogPerms]
|
||||
|
||||
agents = Agent.objects.prefetch_related("site").only("pk", "hostname")
|
||||
agent_hostnames = AgentHostnameSerializer(agents, many=True)
|
||||
def patch(self, request):
|
||||
|
||||
switch_mode = {
|
||||
"info": "INFO",
|
||||
"critical": "CRITICAL",
|
||||
"error": "ERROR",
|
||||
"warning": "WARNING",
|
||||
}
|
||||
level = switch_mode.get(mode, "INFO")
|
||||
agentFilter = Q()
|
||||
logTypeFilter = Q()
|
||||
logLevelFilter = Q()
|
||||
|
||||
if hostname == "all" and order == "latest":
|
||||
cmd = f"grep -h {level} {log_file} | tac"
|
||||
elif hostname == "all" and order == "oldest":
|
||||
cmd = f"grep -h {level} {log_file}"
|
||||
elif hostname != "all" and order == "latest":
|
||||
cmd = f"grep {hostname} {log_file} | grep -h {level} | tac"
|
||||
elif hostname != "all" and order == "oldest":
|
||||
cmd = f"grep {hostname} {log_file} | grep -h {level}"
|
||||
else:
|
||||
return Response("error", status=status.HTTP_400_BAD_REQUEST)
|
||||
if "logTypeFilter" in request.data:
|
||||
logTypeFilter = Q(log_type=request.data["logTypeFilter"])
|
||||
|
||||
contents = subprocess.run(
|
||||
cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
universal_newlines=True,
|
||||
shell=True,
|
||||
)
|
||||
if "logLevelFilter" in request.data:
|
||||
logLevelFilter = Q(log_level=request.data["logLevelFilter"])
|
||||
|
||||
if not contents.stdout:
|
||||
resp = f"No {mode} logs"
|
||||
else:
|
||||
resp = contents.stdout
|
||||
if "agentFilter" in request.data:
|
||||
agentFilter = Q(agent=request.data["agentFilter"])
|
||||
|
||||
return Response({"log": resp, "agents": agent_hostnames.data})
|
||||
debug_logs = (
|
||||
DebugLog.objects.filter(logLevelFilter)
|
||||
.filter(agentFilter)
|
||||
.filter(logTypeFilter)
|
||||
)
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([IsAuthenticated, DebugLogPerms])
|
||||
def download_log(request):
|
||||
log_file = settings.LOG_CONFIG["handlers"][0]["sink"]
|
||||
if settings.DEBUG:
|
||||
with open(log_file, "rb") as f:
|
||||
response = HttpResponse(f.read(), content_type="text/plain")
|
||||
response["Content-Disposition"] = "attachment; filename=debug.log"
|
||||
return response
|
||||
else:
|
||||
response = HttpResponse()
|
||||
response["Content-Disposition"] = "attachment; filename=debug.log"
|
||||
response["X-Accel-Redirect"] = "/private/log/debug.log"
|
||||
return response
|
||||
return Response(DebugLogSerializer(debug_logs, many=True).data)
|
||||
|
||||
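The new GetDebugLog endpoint replaces the old grep-over-logfile view and filters in the database; a request sketch using the DRF test client, as the tests in these commits do (the filter values are examples):

payload = {
    "agentFilter": 42,                # agent pk
    "logTypeFilter": "agent_issues",  # one of LOG_TYPE_CHOICES
    "logLevelFilter": "error",        # one of LOG_LEVEL_CHOICES
}
resp = self.client.patch("/logs/debuglog/", payload, format="json")
# resp.data is a list of DebugLogSerializer dicts; every filter key is optional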
@@ -1,21 +1,22 @@
asgiref==3.3.4
asgiref==3.4.1
asyncio-nats-client==0.11.4
celery==5.1.1
celery==5.1.2
certifi==2021.5.30
cffi==1.14.5
channels==3.0.3
channels_redis==3.2.0
cffi==1.14.6
channels==3.0.4
channels_redis==3.3.0
chardet==4.0.0
cryptography==3.4.7
cryptography==3.4.8
daphne==3.0.2
Django==3.2.4
django-cors-headers==3.7.0
Django==3.2.6
django-cors-headers==3.8.0
django-ipware==3.0.2
django-rest-knox==4.1.0
djangorestframework==3.12.4
future==0.18.2
loguru==0.5.3
msgpack==1.0.2
packaging==20.9
packaging==21.0
psycopg2-binary==2.9.1
pycparser==2.20
pycryptodome==3.10.1
@@ -24,13 +25,13 @@ pyparsing==2.4.7
pytz==2021.1
qrcode==6.1
redis==3.5.3
requests==2.25.1
requests==2.26.0
six==1.16.0
sqlparse==0.4.1
twilio==6.60.0
urllib3==1.26.5
twilio==6.63.1
urllib3==1.26.6
uWSGI==2.0.19.1
validators==0.18.2
vine==5.0.0
websockets==9.1
zipp==3.4.1
zipp==3.5.0
@@ -175,11 +175,29 @@
|
||||
"name": "Screenconnect - Get GUID for client",
|
||||
"description": "Returns Screenconnect GUID for client - Use with Custom Fields for later use. ",
|
||||
"args": [
|
||||
"-serviceName {{client.ScreenConnectService}}"
|
||||
"{{client.ScreenConnectService}}"
|
||||
],
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Collectors"
|
||||
},
|
||||
{
|
||||
"guid": "9cfdfe8f-82bf-4081-a59f-576d694f4649",
|
||||
"filename": "Win_Teamviewer_Get_ID.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "TeamViewer - Get ClientID for client",
|
||||
"description": "Returns Teamviwer ClientID for client - Use with Custom Fields for later use. ",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Collectors"
|
||||
},
|
||||
{
|
||||
"guid": "e43081d4-6f71-4ce3-881a-22da749f7a57",
|
||||
"filename": "Win_AnyDesk_Get_Anynet_ID.ps1",
|
||||
"submittedBy": "https://github.com/meuchels",
|
||||
"name": "AnyDesk - Get AnyNetID for client",
|
||||
"description": "Returns AnyNetID for client - Use with Custom Fields for later use. ",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Collectors"
|
||||
},
|
||||
{
|
||||
"guid": "95a2ee6f-b89b-4551-856e-3081b041caa7",
|
||||
"filename": "Win_Power_Profile_Reset_High_Performance_to_Defaults.ps1",
|
||||
@@ -226,6 +244,30 @@
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"guid": "907652a5-9ec1-4759-9871-a7743f805ff2",
|
||||
"filename": "Win_Software_Uninstall.ps1",
|
||||
"submittedBy": "https://github.com/subzdev",
|
||||
"name": "Software Uninstaller - list, find, and uninstall most software",
|
||||
"description": "Allows listing, finding and uninstalling most software on Windows. There will be a best effort to uninstall silently if the silent uninstall string is not provided.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software",
|
||||
"default_timeout": "600"
|
||||
},
|
||||
{
|
||||
"guid": "64c3b1a8-c85f-4800-85a3-485f78a2d9ad",
|
||||
"filename": "Win_Bitdefender_GravityZone_Install.ps1",
|
||||
"submittedBy": "https://github.com/jhtechIL/",
|
||||
"name": "BitDefender Gravity Zone Install",
|
||||
"description": "Installs BitDefender Gravity Zone, requires client custom field setup. See script comments for details",
|
||||
"args": [
|
||||
"-url {{client.bdurl}}",
|
||||
"-exe {{client.bdexe}}"
|
||||
],
|
||||
"default_timeout": "2500",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"guid": "da51111c-aff6-4d87-9d76-0608e1f67fe5",
|
||||
"filename": "Win_Defender_Enable.ps1",
|
||||
@@ -254,6 +296,16 @@
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Windows Features"
|
||||
},
|
||||
{
|
||||
"guid": "0afd8d00-b95b-4318-8d07-0b9bc4424287",
|
||||
"filename": "Win_Feature_NET35_Enable.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Windows Feature - Enable .NET 3.5",
|
||||
"description": "Enables the Windows .NET 3.5 Framework in Turn Features on and off",
|
||||
"shell": "powershell",
|
||||
"default_timeout": "300",
|
||||
"category": "TRMM (Win):Windows Features"
|
||||
},
|
||||
{
|
||||
"guid": "24f19ead-fdfe-46b4-9dcb-4cd0e12a3940",
|
||||
"filename": "Win_Speedtest.ps1",
|
||||
@@ -368,14 +420,14 @@
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "5615aa90-0272-427b-8acf-0ca019612501",
|
||||
"filename": "Win_Chocolatey_Update_Installed.bat",
|
||||
"guid": "6c78eb04-57ae-43b0-98ed-cbd3ef9e2f80",
|
||||
"filename": "Win_Chocolatey_Manage_Apps_Bulk.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Update Installed Apps",
|
||||
"description": "Update all apps that were installed using Chocolatey.",
|
||||
"shell": "cmd",
|
||||
"name": "Chocolatey - Install, Uninstall and Upgrade Software",
|
||||
"description": "This script installs, uninstalls and updates software using Chocolatey with logic to slow tasks to minimize hitting community limits. Mode install/uninstall/upgrade Hosts x",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software>Chocolatey",
|
||||
"default_timeout": "3600"
|
||||
"default_timeout": "600"
|
||||
},
|
||||
{
|
||||
"guid": "fff8024d-d72e-4457-84fa-6c780f69a16f",
|
||||
@@ -450,6 +502,16 @@
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"guid": "93038ae0-58ce-433e-a3b9-bc99ad1ea79a",
|
||||
"filename": "Win_Services_AutomaticStartup_Running.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Ensure all services with startup type Automatic are running",
|
||||
"description": "Gets a list of all service with startup type of Automatic but aren't running and tries to start them",
|
||||
"shell": "powershell",
|
||||
"default_timeout": "300",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"guid": "e09895d5-ca13-44a2-a38c-6e77c740f0e8",
|
||||
"filename": "Win_ScreenConnectAIO.ps1",
|
||||
@@ -507,6 +569,16 @@
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "7c0c7e37-60ff-462f-9c34-b5cd4c4796a7",
|
||||
"filename": "Win_Wifi_SSID_and_Password_Retrieval.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Network Wireless - Retrieve Saved passwords",
|
||||
"description": "Returns all saved wifi passwords stored on the computer",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "abe78170-7cf9-435b-9666-c5ef6c11a106",
|
||||
"filename": "Win_Network_IPv6_Disable.ps1",
|
||||
@@ -527,6 +599,16 @@
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "5676acca-44e5-46c8-af61-ae795ecb3ef1",
|
||||
"filename": "Win_Network_IP_DHCP_Renew.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Network - Release and Renew IP",
|
||||
"description": "Trigger and release and renew of IP address on all network adapters",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "83aa4d51-63ce-41e7-829f-3c16e6115bbf",
|
||||
"filename": "Win_Network_DNS_Set_to_1.1.1.2.ps1",
|
||||
@@ -557,6 +639,16 @@
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "43e65e5f-717a-4b6d-a724-1a86229fcd42",
|
||||
"filename": "Win_Activation_Check.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Windows Activation check",
|
||||
"description": "Checks to see if windows is activated and returns status",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "120"
|
||||
},
|
||||
{
|
||||
"guid": "83f6c6ea-6120-4fd3-bec8-d3abc505dcdf",
|
||||
"filename": "Win_TRMM_Start_Menu_Delete_Shortcut.ps1",
|
||||
|
||||
api/tacticalrmm/scripts/migrations/0009_scriptsnippet.py (new file, 22 lines)
@@ -0,0 +1,22 @@
# Generated by Django 3.2.1 on 2021-07-21 19:25

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('scripts', '0008_script_guid'),
    ]

    operations = [
        migrations.CreateModel(
            name='ScriptSnippet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=40)),
                ('code', models.TextField()),
                ('shell', models.CharField(choices=[('powershell', 'Powershell'), ('cmd', 'Batch (CMD)'), ('python', 'Python')], max_length=15)),
            ],
        ),
    ]

@@ -0,0 +1,33 @@
# Generated by Django 3.2.1 on 2021-07-26 16:34

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('scripts', '0009_scriptsnippet'),
    ]

    operations = [
        migrations.AddField(
            model_name='scriptsnippet',
            name='desc',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='script',
            name='code_base64',
            field=models.TextField(blank=True, default='', null=True),
        ),
        migrations.AlterField(
            model_name='script',
            name='description',
            field=models.TextField(blank=True, default='', null=True),
        ),
        migrations.AlterField(
            model_name='scriptsnippet',
            name='name',
            field=models.CharField(max_length=40, unique=True),
        ),
    ]

@@ -0,0 +1,28 @@
# Generated by Django 3.2.1 on 2021-07-31 17:07

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('scripts', '0010_auto_20210726_1634'),
    ]

    operations = [
        migrations.AlterField(
            model_name='scriptsnippet',
            name='code',
            field=models.TextField(default=''),
        ),
        migrations.AlterField(
            model_name='scriptsnippet',
            name='desc',
            field=models.CharField(blank=True, default='', max_length=50),
        ),
        migrations.AlterField(
            model_name='scriptsnippet',
            name='shell',
            field=models.CharField(choices=[('powershell', 'Powershell'), ('cmd', 'Batch (CMD)'), ('python', 'Python')], default='powershell', max_length=15),
        ),
    ]
@@ -1,12 +1,10 @@
|
||||
import base64
|
||||
import re
|
||||
from typing import List, Optional
|
||||
from typing import List
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from loguru import logger
|
||||
|
||||
from django.db.models.fields import CharField, TextField
|
||||
from logs.models import BaseAuditModel
|
||||
from tacticalrmm.utils import replace_db_values
|
||||
|
||||
@@ -21,13 +19,11 @@ SCRIPT_TYPES = [
|
||||
("builtin", "Built In"),
|
||||
]
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class Script(BaseAuditModel):
|
||||
guid = name = models.CharField(max_length=64, null=True, blank=True)
|
||||
guid = models.CharField(max_length=64, null=True, blank=True)
|
||||
name = models.CharField(max_length=255)
|
||||
description = models.TextField(null=True, blank=True)
|
||||
description = models.TextField(null=True, blank=True, default="")
|
||||
filename = models.CharField(max_length=255) # deprecated
|
||||
shell = models.CharField(
|
||||
max_length=100, choices=SCRIPT_SHELLS, default="powershell"
|
||||
@@ -43,20 +39,44 @@ class Script(BaseAuditModel):
    )
    favorite = models.BooleanField(default=False)
    category = models.CharField(max_length=100, null=True, blank=True)
    code_base64 = models.TextField(null=True, blank=True)
    code_base64 = models.TextField(null=True, blank=True, default="")
    default_timeout = models.PositiveIntegerField(default=90)

    def __str__(self):
        return self.name

    @property
    def code(self):
    def code_no_snippets(self):
        if self.code_base64:
            base64_bytes = self.code_base64.encode("ascii", "ignore")
            return base64.b64decode(base64_bytes).decode("ascii", "ignore")
            return base64.b64decode(self.code_base64.encode("ascii", "ignore")).decode(
                "ascii", "ignore"
            )
        else:
            return ""

    @property
    def code(self):
        return self.replace_with_snippets(self.code_no_snippets)

    @classmethod
    def replace_with_snippets(cls, code):
        # check if snippet has been added to script body
        matches = re.finditer(r"{{(.*)}}", code)
        if matches:
            replaced_code = code
            for snippet in matches:
                snippet_name = snippet.group(1).strip()
                if ScriptSnippet.objects.filter(name=snippet_name).exists():
                    value = ScriptSnippet.objects.get(name=snippet_name).code
                else:
                    value = ""

                replaced_code = re.sub(snippet.group(), value, replaced_code)

            return replaced_code
        else:
            return code

    @classmethod
    def load_community_scripts(cls):
        import json
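The {{ }} markers resolved by replace_with_snippets look like the sketch below (the snippet name and its code are illustrative, not taken from these commits):

from scripts.models import Script, ScriptSnippet

ScriptSnippet.objects.create(
    name="check_admin",
    shell="powershell",
    code="if (-not (Test-Admin)) { exit 1 }",  # hypothetical snippet body
)

body = "{{check_admin}}\nWrite-Output 'running elevated'"
print(Script.replace_with_snippets(body))
# the marker is replaced with the snippet's code; unknown names become ""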
@@ -97,20 +117,20 @@ class Script(BaseAuditModel):
|
||||
|
||||
if s.exists():
|
||||
i = s.first()
|
||||
i.name = script["name"]
|
||||
i.description = script["description"]
|
||||
i.category = category
|
||||
i.shell = script["shell"]
|
||||
i.default_timeout = default_timeout
|
||||
i.args = args
|
||||
i.name = script["name"] # type: ignore
|
||||
i.description = script["description"] # type: ignore
|
||||
i.category = category # type: ignore
|
||||
i.shell = script["shell"] # type: ignore
|
||||
i.default_timeout = default_timeout # type: ignore
|
||||
i.args = args # type: ignore
|
||||
|
||||
with open(os.path.join(scripts_dir, script["filename"]), "rb") as f:
|
||||
script_bytes = (
|
||||
f.read().decode("utf-8").encode("ascii", "ignore")
|
||||
)
|
||||
i.code_base64 = base64.b64encode(script_bytes).decode("ascii")
|
||||
i.code_base64 = base64.b64encode(script_bytes).decode("ascii") # type: ignore
|
||||
|
||||
i.save(
|
||||
i.save( # type: ignore
|
||||
update_fields=[
|
||||
"name",
|
||||
"description",
|
||||
@@ -175,7 +195,6 @@ class Script(BaseAuditModel):
|
||||
guid=script["guid"],
|
||||
name=script["name"],
|
||||
description=script["description"],
|
||||
filename=script["filename"],
|
||||
shell=script["shell"],
|
||||
script_type="builtin",
|
||||
category=category,
|
||||
@@ -209,7 +228,7 @@ class Script(BaseAuditModel):
|
||||
if match:
|
||||
# only get the match between the () in regex
|
||||
string = match.group(1)
|
||||
value = replace_db_values(string=string, agent=agent, shell=shell)
|
||||
value = replace_db_values(string=string, instance=agent, shell=shell)
|
||||
|
||||
if value:
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg))
|
||||
@@ -221,3 +240,13 @@
                temp_args.append(arg)

        return temp_args


class ScriptSnippet(models.Model):
    name = CharField(max_length=40, unique=True)
    desc = CharField(max_length=50, blank=True, default="")
    code = TextField(default="")
    shell = CharField(max_length=15, choices=SCRIPT_SHELLS, default="powershell")

    def __str__(self):
        return self.name
@@ -1,6 +1,6 @@
from rest_framework.serializers import ModelSerializer, ReadOnlyField

from .models import Script
from .models import Script, ScriptSnippet


class ScriptTableSerializer(ModelSerializer):
@@ -41,3 +41,9 @@ class ScriptCheckSerializer(ModelSerializer):
    class Meta:
        model = Script
        fields = ["code", "shell"]


class ScriptSnippetSerializer(ModelSerializer):
    class Meta:
        model = ScriptSnippet
        fields = "__all__"
@@ -1,12 +1,16 @@
import asyncio

from agents.models import Agent
from packaging import version as pyver

from agents.models import Agent, AgentHistory
from scripts.models import Script
from tacticalrmm.celery import app


@app.task
def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None:
def handle_bulk_command_task(
    agentpks, cmd, shell, timeout, username, run_on_offline=False
) -> None:
    nats_data = {
        "func": "rawcmd",
        "timeout": timeout,
@@ -16,11 +20,31 @@ def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None:
        },
    }
    for agent in Agent.objects.filter(pk__in=agentpks):
        if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
            hist = AgentHistory.objects.create(
                agent=agent,
                type="cmd_run",
                command=cmd,
                username=username,
            )
            nats_data["id"] = hist.pk

        asyncio.run(agent.nats_cmd(nats_data, wait=False))


@app.task
def handle_bulk_script_task(scriptpk, agentpks, args, timeout) -> None:
def handle_bulk_script_task(scriptpk, agentpks, args, timeout, username) -> None:
    script = Script.objects.get(pk=scriptpk)
    for agent in Agent.objects.filter(pk__in=agentpks):
        agent.run_script(scriptpk=script.pk, args=args, timeout=timeout)
        history_pk = 0
        if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
            hist = AgentHistory.objects.create(
                agent=agent,
                type="script_run",
                script=script,
                username=username,
            )
            history_pk = hist.pk
        agent.run_script(
            scriptpk=script.pk, args=args, timeout=timeout, history_pk=history_pk
        )
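Both bulk handlers are dispatched as Celery tasks; an invocation sketch (the agent pks, command, and username are illustrative values):

from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task

# Raw command across several agents; username ends up in AgentHistory for
# agents on 1.6.0 or newer.
handle_bulk_command_task.delay([1, 2, 3], "ipconfig /flushdns", "cmd", 30, "tactical")

# Script run, same pattern.
handle_bulk_script_task.delay(5, [1, 2, 3], [], 90, "tactical")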
@@ -1,15 +1,18 @@
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.files.uploadedfile import SimpleUploadedFile
|
||||
from model_bakery import baker
|
||||
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import Script
|
||||
from .serializers import ScriptSerializer, ScriptTableSerializer
|
||||
from .models import Script, ScriptSnippet
|
||||
from .serializers import (
|
||||
ScriptSerializer,
|
||||
ScriptTableSerializer,
|
||||
ScriptSnippetSerializer,
|
||||
)
|
||||
|
||||
|
||||
class TestScriptViews(TacticalTestCase):
|
||||
@@ -18,7 +21,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
self.authenticate()
|
||||
|
||||
def test_get_scripts(self):
|
||||
url = "/scripts/scripts/"
|
||||
url = "/scripts/"
|
||||
scripts = baker.make("scripts.Script", _quantity=3)
|
||||
|
||||
serializer = ScriptTableSerializer(scripts, many=True)
|
||||
@@ -29,14 +32,14 @@ class TestScriptViews(TacticalTestCase):
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_script(self):
|
||||
url = f"/scripts/scripts/"
|
||||
url = f"/scripts/"
|
||||
|
||||
data = {
|
||||
"name": "Name",
|
||||
"description": "Description",
|
||||
"shell": "powershell",
|
||||
"category": "New",
|
||||
"code": "Some Test Code\nnew Line",
|
||||
"code_base64": "VGVzdA==", # Test
|
||||
"default_timeout": 99,
|
||||
"args": ["hello", "world", r"{{agent.public_ip}}"],
|
||||
"favorite": False,
|
||||
@@ -46,47 +49,24 @@ class TestScriptViews(TacticalTestCase):
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(Script.objects.filter(name="Name").exists())
|
||||
self.assertEqual(Script.objects.get(name="Name").code, data["code"])
|
||||
|
||||
# test with file upload
|
||||
# file with 'Test' as content
|
||||
file = SimpleUploadedFile(
|
||||
"test_script.bat", b"\x54\x65\x73\x74", content_type="text/plain"
|
||||
)
|
||||
data = {
|
||||
"name": "New Name",
|
||||
"description": "Description",
|
||||
"shell": "cmd",
|
||||
"category": "New",
|
||||
"filename": file,
|
||||
"default_timeout": 4455,
|
||||
"args": json.dumps(
|
||||
["hello", "world", r"{{agent.public_ip}}"]
|
||||
), # simulate javascript's JSON.stringify() for formData
|
||||
}
|
||||
|
||||
# test with file upload
|
||||
resp = self.client.post(url, data, format="multipart")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
script = Script.objects.filter(name="New Name").first()
|
||||
self.assertEquals(script.code, "Test")
|
||||
self.assertEqual(Script.objects.get(name="Name").code, "Test")
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_modify_script(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.put("/scripts/500/script/", format="json")
|
||||
resp = self.client.put("/scripts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# make a userdefined script
|
||||
script = baker.make_recipe("scripts.script")
|
||||
url = f"/scripts/{script.pk}/script/"
|
||||
url = f"/scripts/{script.pk}/"
|
||||
|
||||
data = {
|
||||
"name": script.name,
|
||||
"description": "Description Change",
|
||||
"shell": script.shell,
|
||||
"code": "Test Code\nAnother Line",
|
||||
"code_base64": "VGVzdA==", # Test
|
||||
"default_timeout": 13344556,
|
||||
}
|
||||
|
||||
@@ -95,16 +75,18 @@ class TestScriptViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
script = Script.objects.get(pk=script.pk)
|
||||
self.assertEquals(script.description, "Description Change")
|
||||
self.assertEquals(script.code, "Test Code\nAnother Line")
|
||||
self.assertEquals(script.code, "Test")
|
||||
|
||||
# test edit a builtin script
|
||||
|
||||
data = {"name": "New Name", "description": "New Desc", "code": "Some New Code"}
|
||||
data = {
|
||||
"name": "New Name",
|
||||
"description": "New Desc",
|
||||
"code_base64": "VGVzdA==",
|
||||
} # Test
|
||||
builtin_script = baker.make_recipe("scripts.script", script_type="builtin")
|
||||
|
||||
resp = self.client.put(
|
||||
f"/scripts/{builtin_script.pk}/script/", data, format="json"
|
||||
)
|
||||
resp = self.client.put(f"/scripts/{builtin_script.pk}/", data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
data = {
|
||||
@@ -112,13 +94,11 @@ class TestScriptViews(TacticalTestCase):
|
||||
"description": "Description Change",
|
||||
"shell": script.shell,
|
||||
"favorite": True,
|
||||
"code": "Test Code\nAnother Line",
|
||||
"code_base64": "VGVzdA==", # Test
|
||||
"default_timeout": 54345,
|
||||
}
|
||||
# test marking a builtin script as favorite
|
||||
resp = self.client.put(
|
||||
f"/scripts/{builtin_script.pk}/script/", data, format="json"
|
||||
)
|
||||
resp = self.client.put(f"/scripts/{builtin_script.pk}/", data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(Script.objects.get(pk=builtin_script.pk).favorite)
|
||||
|
||||
@@ -126,11 +106,11 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
def test_get_script(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.get("/scripts/500/script/", format="json")
|
||||
resp = self.client.get("/scripts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
script = baker.make("scripts.Script")
|
||||
url = f"/scripts/{script.pk}/script/" # type: ignore
|
||||
url = f"/scripts/{script.pk}/" # type: ignore
|
||||
serializer = ScriptSerializer(script)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -138,14 +118,34 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_test_script(self, run_script):
|
||||
url = "/scripts/testscript/"
|
||||
|
||||
run_script.return_value = "return value"
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
data = {
|
||||
"agent": agent.pk,
|
||||
"code": "some_code",
|
||||
"timeout": 90,
|
||||
"args": [],
|
||||
"shell": "powershell",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, "return value") # type: ignore
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_delete_script(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.delete("/scripts/500/script/", format="json")
|
||||
resp = self.client.delete("/scripts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test delete script
|
||||
script = baker.make_recipe("scripts.script")
|
||||
url = f"/scripts/{script.pk}/script/"
|
||||
url = f"/scripts/{script.pk}/"
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
@@ -153,7 +153,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
# test delete community script
|
||||
script = baker.make_recipe("scripts.script", script_type="builtin")
|
||||
url = f"/scripts/{script.pk}/script/"
|
||||
url = f"/scripts/{script.pk}/"
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
@@ -161,7 +161,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
def test_download_script(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.get("/scripts/500/download/", format="json")
|
||||
resp = self.client.get("/scripts/download/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# return script code property should be "Test"
|
||||
@@ -170,7 +170,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
script = baker.make(
|
||||
"scripts.Script", code_base64="VGVzdA==", shell="powershell"
|
||||
)
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
url = f"/scripts/download/{script.pk}/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -178,7 +178,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
# test batch file
|
||||
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="cmd")
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
url = f"/scripts/download/{script.pk}/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -186,7 +186,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
# test python file
|
||||
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="python")
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
url = f"/scripts/download/{script.pk}/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -497,3 +497,106 @@ class TestScriptViews(TacticalTestCase):
|
||||
["-Parameter", "-Another $True"],
|
||||
Script.parse_script_args(agent=agent, shell="powershell", args=args),
|
||||
)
|
||||
|
||||
|
||||
class TestScriptSnippetViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
self.authenticate()
|
||||
|
||||
def test_get_script_snippets(self):
|
||||
url = "/scripts/snippets/"
|
||||
snippets = baker.make("scripts.ScriptSnippet", _quantity=3)
|
||||
|
||||
serializer = ScriptSnippetSerializer(snippets, many=True)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(serializer.data, resp.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_script_snippet(self):
|
||||
url = f"/scripts/snippets/"
|
||||
|
||||
data = {
|
||||
"name": "Name",
|
||||
"description": "Description",
|
||||
"shell": "powershell",
|
||||
"code": "Test",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(ScriptSnippet.objects.filter(name="Name").exists())
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_modify_script_snippet(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.put("/scripts/snippets/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# make a userdefined script
|
||||
snippet = baker.make("scripts.ScriptSnippet", name="Test")
|
||||
url = f"/scripts/snippets/{snippet.pk}/" # type: ignore
|
||||
|
||||
data = {"name": "New Name"} # type: ignore
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
snippet = ScriptSnippet.objects.get(pk=snippet.pk) # type: ignore
|
||||
self.assertEquals(snippet.name, "New Name")
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_get_script_snippet(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.get("/scripts/snippets/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
snippet = baker.make("scripts.ScriptSnippet")
|
||||
url = f"/scripts/snippets/{snippet.pk}/" # type: ignore
|
||||
serializer = ScriptSnippetSerializer(snippet)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(serializer.data, resp.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_delete_script_snippet(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.delete("/scripts/snippets/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test delete script snippet
|
||||
snippet = baker.make("scripts.ScriptSnippet")
|
||||
url = f"/scripts/snippets/{snippet.pk}/" # type: ignore
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
self.assertFalse(ScriptSnippet.objects.filter(pk=snippet.pk).exists()) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_snippet_replacement(self):
|
||||
|
||||
snippet1 = baker.make(
|
||||
"scripts.ScriptSnippet", name="snippet1", code="Snippet 1 Code"
|
||||
)
|
||||
snippet2 = baker.make(
|
||||
"scripts.ScriptSnippet", name="snippet2", code="Snippet 2 Code"
|
||||
)
|
||||
|
||||
test_no_snippet = "No Snippets Here"
|
||||
test_with_snippet = "Snippet 1: {{snippet1}}\nSnippet 2: {{snippet2}}"
|
||||
|
||||
# test putting snippet in text
|
||||
result = Script.replace_with_snippets(test_with_snippet)
|
||||
self.assertEqual(
|
||||
result,
|
||||
f"Snippet 1: {snippet1.code}\nSnippet 2: {snippet2.code}", # type:ignore
|
||||
)
|
||||
|
||||
# test text with no snippets
|
||||
result = Script.replace_with_snippets(test_no_snippet)
|
||||
self.assertEqual(result, test_no_snippet)
|
||||
|
||||
@@ -3,7 +3,10 @@ from django.urls import path
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
path("scripts/", views.GetAddScripts.as_view()),
|
||||
path("<int:pk>/script/", views.GetUpdateDeleteScript.as_view()),
|
||||
path("<int:pk>/download/", views.download),
|
||||
path("", views.GetAddScripts.as_view()),
|
||||
path("<int:pk>/", views.GetUpdateDeleteScript.as_view()),
|
||||
path("snippets/", views.GetAddScriptSnippets.as_view()),
|
||||
path("snippets/<int:pk>/", views.GetUpdateDeleteScriptSnippet.as_view()),
|
||||
path("testscript/", views.TestScript.as_view()),
|
||||
path("download/<int:pk>/", views.download),
|
||||
]
|
||||
|
||||
@@ -1,64 +1,39 @@
|
||||
import base64
|
||||
import json
|
||||
import asyncio
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from loguru import logger
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.parsers import FileUploadParser
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import Script
|
||||
from .models import Script, ScriptSnippet
|
||||
from .permissions import ManageScriptsPerms
|
||||
from .serializers import ScriptSerializer, ScriptTableSerializer
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from agents.permissions import RunScriptPerms
|
||||
from .serializers import (
|
||||
ScriptSerializer,
|
||||
ScriptTableSerializer,
|
||||
ScriptSnippetSerializer,
|
||||
)
|
||||
|
||||
|
||||
class GetAddScripts(APIView):
|
||||
permission_classes = [IsAuthenticated, ManageScriptsPerms]
|
||||
parser_class = (FileUploadParser,)
|
||||
|
||||
def get(self, request):
|
||||
scripts = Script.objects.all()
|
||||
|
||||
showCommunityScripts = request.GET.get("showCommunityScripts", True)
|
||||
if not showCommunityScripts or showCommunityScripts == "false":
|
||||
scripts = Script.objects.filter(script_type="userdefined")
|
||||
else:
|
||||
scripts = Script.objects.all()
|
||||
|
||||
return Response(ScriptTableSerializer(scripts, many=True).data)
|
||||
|
||||
def post(self, request, format=None):
|
||||
data = {
|
||||
"name": request.data["name"],
|
||||
"category": request.data["category"],
|
||||
"description": request.data["description"],
|
||||
"shell": request.data["shell"],
|
||||
"default_timeout": request.data["default_timeout"],
|
||||
"script_type": "userdefined", # force all uploads to be userdefined. built in scripts cannot be edited by user
|
||||
}
|
||||
def post(self, request):
|
||||
|
||||
# code editor upload
|
||||
if "args" in request.data.keys() and isinstance(request.data["args"], list):
|
||||
data["args"] = request.data["args"]
|
||||
|
||||
# file upload, have to json load it cuz it's formData
|
||||
if "args" in request.data.keys() and "file_upload" in request.data.keys():
|
||||
data["args"] = json.loads(request.data["args"])
|
||||
|
||||
if "favorite" in request.data.keys():
|
||||
data["favorite"] = request.data["favorite"]
|
||||
|
||||
if "filename" in request.data.keys():
|
||||
message_bytes = request.data["filename"].read()
|
||||
data["code_base64"] = base64.b64encode(message_bytes).decode(
|
||||
"ascii", "ignore"
|
||||
)
|
||||
|
||||
elif "code" in request.data.keys():
|
||||
message_bytes = request.data["code"].encode("ascii", "ignore")
|
||||
data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
|
||||
|
||||
serializer = ScriptSerializer(data=data, partial=True)
|
||||
serializer = ScriptSerializer(data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
|
||||
@@ -85,11 +60,6 @@ class GetUpdateDeleteScript(APIView):
|
||||
else:
|
||||
return notify_error("Community scripts cannot be edited.")
|
||||
|
||||
elif "code" in data:
|
||||
message_bytes = data["code"].encode("ascii")
|
||||
data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
|
||||
data.pop("code")
|
||||
|
||||
serializer = ScriptSerializer(data=data, instance=script, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
@@ -107,11 +77,87 @@ class GetUpdateDeleteScript(APIView):
|
||||
return Response(f"{script.name} was deleted!")
|
||||
|
||||
|
||||
class GetAddScriptSnippets(APIView):
|
||||
permission_classes = [IsAuthenticated, ManageScriptsPerms]
|
||||
|
||||
def get(self, request):
|
||||
snippets = ScriptSnippet.objects.all()
|
||||
return Response(ScriptSnippetSerializer(snippets, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
|
||||
serializer = ScriptSnippetSerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("Script snippet was saved successfully")
|
||||
|
||||
|
||||
class GetUpdateDeleteScriptSnippet(APIView):
|
||||
permission_classes = [IsAuthenticated, ManageScriptsPerms]
|
||||
|
||||
def get(self, request, pk):
|
||||
snippet = get_object_or_404(ScriptSnippet, pk=pk)
|
||||
return Response(ScriptSnippetSerializer(snippet).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
snippet = get_object_or_404(ScriptSnippet, pk=pk)
|
||||
|
||||
serializer = ScriptSnippetSerializer(
|
||||
instance=snippet, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("Script snippet was saved successfully")
|
||||
|
||||
def delete(self, request, pk):
|
||||
snippet = get_object_or_404(ScriptSnippet, pk=pk)
|
||||
snippet.delete()
|
||||
|
||||
return Response("Script snippet was deleted successfully")
|
||||
|
||||
|
||||
class TestScript(APIView):
|
||||
permission_classes = [IsAuthenticated, RunScriptPerms]
|
||||
|
||||
def post(self, request):
|
||||
from .models import Script
|
||||
from agents.models import Agent
|
||||
|
||||
agent = get_object_or_404(Agent, pk=request.data["agent"])
|
||||
|
||||
parsed_args = Script.parse_script_args(
|
||||
self, request.data["shell"], request.data["args"]
|
||||
)
|
||||
|
||||
data = {
|
||||
"func": "runscript",
|
||||
"timeout": request.data["timeout"],
|
||||
"script_args": parsed_args,
|
||||
"payload": {
|
||||
"code": Script.replace_with_snippets(request.data["code"]),
|
||||
"shell": request.data["shell"],
|
||||
},
|
||||
}
|
||||
|
||||
r = asyncio.run(
|
||||
agent.nats_cmd(data, timeout=request.data["timeout"], wait=True)
|
||||
)
|
||||
|
||||
return Response(r)
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([IsAuthenticated, ManageScriptsPerms])
|
||||
def download(request, pk):
|
||||
script = get_object_or_404(Script, pk=pk)
|
||||
|
||||
with_snippets = request.GET.get("with_snippets", True)
|
||||
|
||||
if with_snippets == "false":
|
||||
with_snippets = False
|
||||
|
||||
if script.shell == "powershell":
|
||||
filename = f"{script.name}.ps1"
|
||||
elif script.shell == "cmd":
|
||||
@@ -119,4 +165,9 @@ def download(request, pk):
|
||||
else:
|
||||
filename = f"{script.name}.py"
|
||||
|
||||
return Response({"filename": filename, "code": script.code})
|
||||
return Response(
|
||||
{
|
||||
"filename": filename,
|
||||
"code": script.code if with_snippets else script.code_no_snippets,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -1,21 +1,16 @@
|
||||
import asyncio
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from loguru import logger
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .permissions import ManageWinSvcsPerms
|
||||
from .serializers import ServicesSerializer
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@api_view()
|
||||
def get_services(request, pk):
|
||||
|
||||
@@ -35,9 +35,13 @@ app.conf.beat_schedule = {
|
||||
"task": "agents.tasks.auto_self_agent_update_task",
|
||||
"schedule": crontab(minute=35, hour="*"),
|
||||
},
|
||||
"monitor-agents": {
|
||||
"task": "agents.tasks.monitor_agents_task",
|
||||
"schedule": crontab(minute="*/7"),
|
||||
"handle-agents": {
|
||||
"task": "agents.tasks.handle_agents_task",
|
||||
"schedule": crontab(minute="*"),
|
||||
},
|
||||
"get-agentinfo": {
|
||||
"task": "agents.tasks.agent_getinfo_task",
|
||||
"schedule": crontab(minute="*"),
|
||||
},
|
||||
"get-wmi": {
|
||||
"task": "agents.tasks.get_wmi_task",
|
||||
|
||||
@@ -2,6 +2,7 @@ import threading
|
||||
|
||||
from django.conf import settings
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
from ipware import get_client_ip
|
||||
|
||||
request_local = threading.local()
|
||||
|
||||
@@ -67,6 +68,7 @@ class AuditMiddleware:
|
||||
debug_info["view_func"] = view_func.__name__
|
||||
debug_info["view_args"] = view_args
|
||||
debug_info["view_kwargs"] = view_kwargs
|
||||
debug_info["ip"] = request._client_ip
|
||||
|
||||
request_local.debug_info = debug_info
|
||||
|
||||
@@ -83,3 +85,15 @@ class AuditMiddleware:
|
||||
request_local.debug_info = None
|
||||
request_local.username = None
|
||||
return response
|
||||
|
||||
|
||||
class LogIPMiddleware:
|
||||
def __init__(self, get_response):
|
||||
self.get_response = get_response
|
||||
|
||||
def __call__(self, request):
|
||||
client_ip, is_routable = get_client_ip(request)
|
||||
|
||||
request._client_ip = client_ip
|
||||
response = self.get_response(request)
|
||||
return response
|
||||
|
||||
@@ -15,23 +15,25 @@ EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
|
||||
AUTH_USER_MODEL = "accounts.User"
|
||||
|
||||
# latest release
|
||||
TRMM_VERSION = "0.7.2"
|
||||
TRMM_VERSION = "0.8.0"
|
||||
|
||||
# bump this version everytime vue code is changed
|
||||
# to alert user they need to manually refresh their browser
|
||||
APP_VER = "0.0.141"
|
||||
APP_VER = "0.0.142"
|
||||
|
||||
# https://github.com/wh1te909/rmmagent
|
||||
LATEST_AGENT_VER = "1.5.9"
|
||||
LATEST_AGENT_VER = "1.6.0"
|
||||
|
||||
MESH_VER = "0.8.60"
|
||||
MESH_VER = "0.9.15"
|
||||
|
||||
NATS_SERVER_VER = "2.3.3"
|
||||
|
||||
# for the update script, bump when need to recreate venv or npm install
|
||||
PIP_VER = "19"
|
||||
NPM_VER = "19"
|
||||
PIP_VER = "21"
|
||||
NPM_VER = "21"
|
||||
|
||||
SETUPTOOLS_VER = "57.0.0"
|
||||
WHEEL_VER = "0.36.2"
|
||||
SETUPTOOLS_VER = "57.4.0"
|
||||
WHEEL_VER = "0.37.0"
|
||||
|
||||
DL_64 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}.exe"
|
||||
DL_32 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}-x86.exe"
|
||||
@@ -109,6 +111,7 @@ MIDDLEWARE = [
|
||||
"django.middleware.security.SecurityMiddleware",
|
||||
"django.contrib.sessions.middleware.SessionMiddleware",
|
||||
"corsheaders.middleware.CorsMiddleware", ##
|
||||
"tacticalrmm.middleware.LogIPMiddleware",
|
||||
"django.middleware.common.CommonMiddleware",
|
||||
"django.middleware.csrf.CsrfViewMiddleware",
|
||||
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
||||
@@ -173,12 +176,23 @@ STATIC_URL = "/static/"
|
||||
STATIC_ROOT = os.path.join(BASE_DIR, "static")
|
||||
STATICFILES_DIRS = [os.path.join(BASE_DIR, "tacticalrmm/static/")]
|
||||
|
||||
|
||||
LOG_CONFIG = {
|
||||
"handlers": [{"sink": os.path.join(LOG_DIR, "debug.log"), "serialize": False}]
|
||||
LOGGING = {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"handlers": {
|
||||
"file": {
|
||||
"level": "ERROR",
|
||||
"class": "logging.FileHandler",
|
||||
"filename": os.path.join(LOG_DIR, "django_debug.log"),
|
||||
}
|
||||
},
|
||||
"loggers": {
|
||||
"django.request": {"handlers": ["file"], "level": "ERROR", "propagate": True}
|
||||
},
|
||||
}
|
||||
|
||||
if "AZPIPELINE" in os.environ:
|
||||
print("PIPELINE")
|
||||
DATABASES = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.postgresql",
|
||||
|
||||
@@ -4,7 +4,8 @@ from unittest.mock import mock_open, patch
|
||||
|
||||
import requests
|
||||
from django.conf import settings
|
||||
from django.test import TestCase, override_settings
|
||||
from django.test import override_settings
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .utils import (
|
||||
bitdays_to_string,
|
||||
@@ -16,7 +17,10 @@ from .utils import (
|
||||
)
|
||||
|
||||
|
||||
class TestUtils(TestCase):
|
||||
class TestUtils(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("requests.post")
|
||||
@patch("__main__.__builtins__.open", new_callable=mock_open)
|
||||
def test_generate_winagent_exe_success(self, m_open, mock_post):
|
||||
@@ -77,7 +81,7 @@ class TestUtils(TestCase):
|
||||
@patch("subprocess.run")
|
||||
def test_run_nats_api_cmd(self, mock_subprocess):
|
||||
ids = ["a", "b", "c"]
|
||||
_ = run_nats_api_cmd("monitor", ids)
|
||||
_ = run_nats_api_cmd("wmi", ids)
|
||||
mock_subprocess.assert_called_once()
|
||||
|
||||
def test_bitdays_to_string(self):
|
||||
|
||||
@@ -15,14 +15,12 @@ from django.conf import settings
|
||||
from django.contrib.auth.models import AnonymousUser
|
||||
from django.http import FileResponse
|
||||
from knox.auth import TokenAuthentication
|
||||
from loguru import logger
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CodeSignToken
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from logs.models import DebugLog
|
||||
from agents.models import Agent
|
||||
|
||||
notify_error = lambda msg: Response(msg, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
@@ -61,7 +59,7 @@ def generate_winagent_exe(
|
||||
)
|
||||
|
||||
try:
|
||||
codetoken = CodeSignToken.objects.first().token
|
||||
codetoken = CodeSignToken.objects.first().token # type:ignore
|
||||
base_url = get_exegen_url() + "/api/v1/winagents/?"
|
||||
params = {
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
@@ -107,7 +105,7 @@ def generate_winagent_exe(
|
||||
break
|
||||
|
||||
if errors:
|
||||
logger.error(errors)
|
||||
DebugLog.error(message=errors)
|
||||
return notify_error(
|
||||
"Something went wrong. Check debug error log for exact error message"
|
||||
)
|
||||
@@ -123,7 +121,7 @@ def generate_winagent_exe(
|
||||
def get_default_timezone():
|
||||
from core.models import CoreSettings
|
||||
|
||||
return pytz.timezone(CoreSettings.objects.first().default_time_zone)
|
||||
return pytz.timezone(CoreSettings.objects.first().default_time_zone) # type:ignore
|
||||
|
||||
|
||||
def get_bit_days(days: list[str]) -> int:
|
||||
@@ -178,28 +176,28 @@ def filter_software(sw: SoftwareList) -> SoftwareList:
|
||||
|
||||
def reload_nats():
|
||||
users = [{"user": "tacticalrmm", "password": settings.SECRET_KEY}]
|
||||
agents = Agent.objects.prefetch_related("user").only("pk", "agent_id")
|
||||
agents = Agent.objects.prefetch_related("user").only(
|
||||
"pk", "agent_id"
|
||||
) # type:ignore
|
||||
for agent in agents:
|
||||
try:
|
||||
users.append(
|
||||
{"user": agent.agent_id, "password": agent.user.auth_token.key}
|
||||
)
|
||||
except:
|
||||
logger.critical(
|
||||
f"{agent.hostname} does not have a user account, NATS will not work"
|
||||
DebugLog.critical(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} does not have a user account, NATS will not work",
|
||||
)
|
||||
|
||||
domain = settings.ALLOWED_HOSTS[0].split(".", 1)[1]
|
||||
cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
|
||||
key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"
|
||||
if hasattr(settings, "CERT_FILE") and hasattr(settings, "KEY_FILE"):
|
||||
if os.path.exists(settings.CERT_FILE) and os.path.exists(settings.KEY_FILE):
|
||||
cert_file = settings.CERT_FILE
|
||||
key_file = settings.KEY_FILE
|
||||
else:
|
||||
cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
|
||||
key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"
|
||||
else:
|
||||
cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
|
||||
key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"
|
||||
|
||||
config = {
|
||||
"tls": {
|
||||
@@ -207,7 +205,7 @@ def reload_nats():
|
||||
"key_file": key_file,
|
||||
},
|
||||
"authorization": {"users": users},
|
||||
"max_payload": 2048576005,
|
||||
"max_payload": 67108864,
|
||||
}
|
||||
|
||||
conf = os.path.join(settings.BASE_DIR, "nats-rmm.conf")
|
||||
@@ -248,21 +246,34 @@ KnoxAuthMiddlewareStack = lambda inner: KnoxAuthMiddlewareInstance(
|
||||
)
|
||||
|
||||
|
||||
def run_nats_api_cmd(mode: str, ids: list[str], timeout: int = 30) -> None:
|
||||
config = {
|
||||
"key": settings.SECRET_KEY,
|
||||
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||
"agents": ids,
|
||||
}
|
||||
def run_nats_api_cmd(mode: str, ids: list[str] = [], timeout: int = 30) -> None:
|
||||
if mode == "wmi":
|
||||
config = {
|
||||
"key": settings.SECRET_KEY,
|
||||
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||
"agents": ids,
|
||||
}
|
||||
else:
|
||||
db = settings.DATABASES["default"]
|
||||
config = {
|
||||
"key": settings.SECRET_KEY,
|
||||
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||
"user": db["USER"],
|
||||
"pass": db["PASSWORD"],
|
||||
"host": db["HOST"],
|
||||
"port": int(db["PORT"]),
|
||||
"dbname": db["NAME"],
|
||||
}
|
||||
|
||||
with tempfile.NamedTemporaryFile() as fp:
|
||||
with open(fp.name, "w") as f:
|
||||
json.dump(config, f)
|
||||
|
||||
cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", mode]
|
||||
try:
|
||||
subprocess.run(cmd, capture_output=True, timeout=timeout)
|
||||
subprocess.run(cmd, timeout=timeout)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
DebugLog.error(message=e)
|
||||
|
||||
|
||||
def get_latest_trmm_ver() -> str:
|
||||
@@ -277,15 +288,16 @@ def get_latest_trmm_ver() -> str:
|
||||
if "TRMM_VERSION" in line:
|
||||
return line.split(" ")[2].strip('"')
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
DebugLog.error(message=e)
|
||||
|
||||
return "error"
|
||||
|
||||
|
||||
def replace_db_values(
|
||||
string: str, agent: Agent = None, shell: str = None, quotes=True
|
||||
string: str, instance=None, shell: str = None, quotes=True # type:ignore
|
||||
) -> Union[str, None]:
|
||||
from core.models import CustomField, GlobalKVStore
|
||||
from clients.models import Client, Site
|
||||
|
||||
# split by period if exists. First should be model and second should be property i.e {{client.name}}
|
||||
temp = string.split(".")
|
||||
@@ -293,7 +305,7 @@ def replace_db_values(
|
||||
# check for model and property
|
||||
if len(temp) < 2:
|
||||
# ignore arg since it is invalid
|
||||
return None
|
||||
return ""
|
||||
|
||||
# value is in the global keystore and replace value
|
||||
if temp[0] == "global":
|
||||
@@ -302,30 +314,48 @@ def replace_db_values(
|
||||
|
||||
return f"'{value}'" if quotes else value
|
||||
else:
|
||||
logger.error(
|
||||
f"Couldn't lookup value for: {string}. Make sure it exists in CoreSettings > Key Store"
|
||||
DebugLog.error(
|
||||
log_type="scripting",
|
||||
message=f"{agent.hostname} Couldn't lookup value for: {string}. Make sure it exists in CoreSettings > Key Store", # type:ignore
|
||||
)
|
||||
return None
|
||||
return ""
|
||||
|
||||
if not agent:
|
||||
# agent must be set if not global property
|
||||
return f"There was an error finding the agent: {agent}"
|
||||
if not instance:
|
||||
# instance must be set if not global property
|
||||
return ""
|
||||
|
||||
if temp[0] == "client":
|
||||
model = "client"
|
||||
obj = agent.client
|
||||
if isinstance(instance, Client):
|
||||
obj = instance
|
||||
elif hasattr(instance, "client"):
|
||||
obj = instance.client
|
||||
else:
|
||||
obj = None
|
||||
elif temp[0] == "site":
|
||||
model = "site"
|
||||
obj = agent.site
|
||||
if isinstance(instance, Site):
|
||||
obj = instance
|
||||
elif hasattr(instance, "site"):
|
||||
obj = instance.site
|
||||
else:
|
||||
obj = None
|
||||
elif temp[0] == "agent":
|
||||
model = "agent"
|
||||
obj = agent
|
||||
if isinstance(instance, Agent):
|
||||
obj = instance
|
||||
else:
|
||||
obj = None
|
||||
else:
|
||||
# ignore arg since it is invalid
|
||||
logger.error(
|
||||
f"Not enough information to find value for: {string}. Only agent, site, client, and global are supported."
|
||||
DebugLog.error(
|
||||
log_type="scripting",
|
||||
message=f"{instance} Not enough information to find value for: {string}. Only agent, site, client, and global are supported.",
|
||||
)
|
||||
return None
|
||||
return ""
|
||||
|
||||
if not obj:
|
||||
return ""
|
||||
|
||||
if hasattr(obj, temp[1]):
|
||||
value = f"'{getattr(obj, temp[1])}'" if quotes else getattr(obj, temp[1])
|
||||
@@ -359,19 +389,21 @@ def replace_db_values(
|
||||
|
||||
else:
|
||||
# ignore arg since property is invalid
|
||||
logger.error(
|
||||
f"Couldn't find property on supplied variable: {string}. Make sure it exists as a custom field or a valid agent property"
|
||||
DebugLog.error(
|
||||
log_type="scripting",
|
||||
message=f"{instance} Couldn't find property on supplied variable: {string}. Make sure it exists as a custom field or a valid agent property",
|
||||
)
|
||||
return None
|
||||
return ""
|
||||
|
||||
# log any unhashable type errors
|
||||
if value != None:
|
||||
return value # type: ignore
|
||||
else:
|
||||
logger.error(
|
||||
f"Couldn't lookup value for: {string}. Make sure it exists as a custom field or a valid agent property"
|
||||
DebugLog.error(
|
||||
log_type="scripting",
|
||||
message=f" {instance}({instance.pk}) Couldn't lookup value for: {string}. Make sure it exists as a custom field or a valid agent property",
|
||||
)
|
||||
return None
|
||||
return ""
|
||||
|
||||
|
||||
def format_shell_array(value: list) -> str:
|
||||
|
||||
@@ -3,15 +3,12 @@ import datetime as dt
|
||||
import time
|
||||
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
|
||||
from agents.models import Agent
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from logs.models import DebugLog
|
||||
|
||||
|
||||
@app.task
|
||||
@@ -120,7 +117,11 @@ def check_agent_update_schedule_task():
|
||||
|
||||
if install:
|
||||
# initiate update on agent asynchronously and don't worry about ret code
|
||||
logger.info(f"Installing windows updates on {agent.salt_id}")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="windows_updates",
|
||||
message=f"Installing windows updates on {agent.hostname}",
|
||||
)
|
||||
nats_data = {
|
||||
"func": "installwinupdates",
|
||||
"guids": agent.get_approved_update_guids(),
|
||||
|
||||
@@ -8,7 +8,7 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
Debian10:
|
||||
AGENT_NAME: "azpipelines-deb10"
|
||||
AGENT_NAME: "az-pipeline-fran"
|
||||
|
||||
pool:
|
||||
name: linux-vms
|
||||
@@ -20,6 +20,7 @@ jobs:
|
||||
sudo -u postgres psql -c 'DROP DATABASE IF EXISTS pipeline'
|
||||
sudo -u postgres psql -c 'DROP DATABASE IF EXISTS test_pipeline'
|
||||
sudo -u postgres psql -c 'CREATE DATABASE pipeline'
|
||||
sudo -u postgres psql -c "SET client_encoding = 'UTF8'" pipeline
|
||||
SETTINGS_FILE="/myagent/_work/1/s/api/tacticalrmm/tacticalrmm/settings.py"
|
||||
rm -rf /myagent/_work/1/s/api/env
|
||||
cd /myagent/_work/1/s/api
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
SCRIPT_VERSION="14"
|
||||
SCRIPT_VERSION="15"
|
||||
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh'
|
||||
|
||||
GREEN='\033[0;32m'
|
||||
@@ -80,7 +80,7 @@ if [ -f "${sysd}/daphne.service" ]; then
|
||||
sudo cp ${sysd}/daphne.service ${tmp_dir}/systemd/
|
||||
fi
|
||||
|
||||
cat /rmm/api/tacticalrmm/tacticalrmm/private/log/debug.log | gzip -9 > ${tmp_dir}/rmm/debug.log.gz
|
||||
cat /rmm/api/tacticalrmm/tacticalrmm/private/log/django_debug.log | gzip -9 > ${tmp_dir}/rmm/debug.log.gz
|
||||
cp /rmm/api/tacticalrmm/tacticalrmm/local_settings.py /rmm/api/tacticalrmm/app.ini ${tmp_dir}/rmm/
|
||||
cp /rmm/web/.env ${tmp_dir}/rmm/env
|
||||
cp /rmm/api/tacticalrmm/tacticalrmm/private/exe/mesh*.exe ${tmp_dir}/rmm/
|
||||
|
||||
@@ -15,6 +15,7 @@ MESH_USER=tactical
|
||||
MESH_PASS=tactical
|
||||
MONGODB_USER=mongouser
|
||||
MONGODB_PASSWORD=mongopass
|
||||
MESH_PERSISTENT_CONFIG=0
|
||||
|
||||
# database settings
|
||||
POSTGRES_USER=postgres
|
||||
|
||||
@@ -9,14 +9,19 @@ set -e
|
||||
: "${MONGODB_HOST:=tactical-mongodb}"
|
||||
: "${MONGODB_PORT:=27017}"
|
||||
: "${NGINX_HOST_IP:=172.20.0.20}"
|
||||
: "${MESH_PERSISTENT_CONFIG:=0}"
|
||||
|
||||
mkdir -p /home/node/app/meshcentral-data
|
||||
mkdir -p ${TACTICAL_DIR}/tmp
|
||||
|
||||
if [ ! -f "/home/node/app/meshcentral-data/config.json" ] || [[ "${MESH_PERSISTENT_CONFIG}" -eq 0 ]]; then
|
||||
|
||||
encoded_uri=$(node -p "encodeURI('mongodb://${MONGODB_USER}:${MONGODB_PASSWORD}@${MONGODB_HOST}:${MONGODB_PORT}')")
|
||||
|
||||
mesh_config="$(cat << EOF
|
||||
{
|
||||
"settings": {
|
||||
"mongodb": "mongodb://${MONGODB_USER}:${MONGODB_PASSWORD}@${MONGODB_HOST}:${MONGODB_PORT}",
|
||||
"mongodb": "${encoded_uri}",
|
||||
"Cert": "${MESH_HOST}",
|
||||
"TLSOffload": "${NGINX_HOST_IP}",
|
||||
"RedirPort": 80,
|
||||
@@ -54,11 +59,19 @@ EOF
|
||||
|
||||
echo "${mesh_config}" > /home/node/app/meshcentral-data/config.json
|
||||
|
||||
fi
|
||||
|
||||
node node_modules/meshcentral --createaccount ${MESH_USER} --pass ${MESH_PASS} --email example@example.com
|
||||
node node_modules/meshcentral --adminaccount ${MESH_USER}
|
||||
|
||||
if [ ! -f "${TACTICAL_DIR}/tmp/mesh_token" ]; then
|
||||
node node_modules/meshcentral --logintokenkey > ${TACTICAL_DIR}/tmp/mesh_token
|
||||
mesh_token=$(node node_modules/meshcentral --logintokenkey)
|
||||
|
||||
if [[ ${#mesh_token} -eq 160 ]]; then
|
||||
echo ${mesh_token} > /opt/tactical/tmp/mesh_token
|
||||
else
|
||||
echo "Failed to generate mesh token. Fix the error and restart the mesh container"
|
||||
fi
|
||||
fi
|
||||
|
||||
# wait for nginx container
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM nats:2.2.6-alpine
|
||||
FROM nats:2.3.3-alpine
|
||||
|
||||
ENV TACTICAL_DIR /opt/tactical
|
||||
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# creates python virtual env
|
||||
FROM python:3.9.2-slim AS CREATE_VENV_STAGE
|
||||
FROM python:3.9.6-slim AS CREATE_VENV_STAGE
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
@@ -24,7 +24,7 @@ RUN apt-get update && \
|
||||
|
||||
|
||||
# runtime image
|
||||
FROM python:3.9.2-slim
|
||||
FROM python:3.9.6-slim
|
||||
|
||||
# set env variables
|
||||
ENV VIRTUAL_ENV /opt/venv
|
||||
|
||||
@@ -97,6 +97,7 @@ services:
|
||||
MESH_PASS: ${MESH_PASS}
|
||||
MONGODB_USER: ${MONGODB_USER}
|
||||
MONGODB_PASSWORD: ${MONGODB_PASSWORD}
|
||||
MESH_PERSISTENT_CONFIG: ${MESH_PERSISTENT_CONFIG}
|
||||
networks:
|
||||
proxy:
|
||||
aliases:
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
set -o errexit
|
||||
set -o pipefail
|
||||
|
||||
# tactical tactical-frontend tactical-nats tactical-nginx
|
||||
DOCKER_IMAGES="tactical tactical-frontend tactical-nats tactical-nginx tactical-meshcentral"
|
||||
|
||||
cd ..
|
||||
|
||||
46
docs/docs/3rdparty_anydesk.md
Normal file
@@ -0,0 +1,46 @@
|
||||
# AnyDesk
|
||||
|
||||
## AnyDesk Integration
|
||||
|
||||
!!!info
|
||||
You can set up a full automation policy to collect the machine GUID, but this example will collect from just one agent for testing purposes.
|
||||
|
||||
From the UI go to **Settings > Global Settings > CUSTOM FIELDS > Agents**
|
||||
|
||||
Add Custom Field</br>
|
||||
**Target** = `Agent`</br>
|
||||
**Name** = `AnyNetID`</br>
|
||||
**Field Type** = `Text`</br>
|
||||
|
||||

|
||||
|
||||
While in Global Settings go to **URL ACTIONS**
|
||||
|
||||
Add a URL Action</br>
|
||||
**Name** = `AnyDesk Control`</br>
|
||||
**Description** = `Connect to a AnyDesk Session`</br>
|
||||
**URL Pattern** =
|
||||
|
||||
```html
|
||||
anydesk:{{agent.AnyNetID}}
|
||||
```
|
||||
|
||||
Navigate to an agent with AnyDesk running (or apply using **Settings > Automation Manager**).</br>
|
||||
Go to Tasks.</br>
|
||||
Add Task</br>
|
||||
**Select Script** = `AnyDesk - Get AnyNetID for client` (this is a builtin script from script library)</br>
|
||||
**Descriptive name of task** = `Collects the AnyNetID for AnyDesk.`</br>
|
||||
**Collector Task** = `CHECKED`</br>
|
||||
**Custom Field to update** = `AnyNetID`</br>
|
||||
|
||||

|
||||
|
||||
Click **Next**</br>
|
||||
Check **Manual**</br>
|
||||
Click **Add Task**
|
||||
|
||||
Right click on the newly created task and click **Run Task Now**.
|
||||
|
||||
Give it a second to execute, then right-click the agent that you are working with and go to **Run URL Action > AnyDesk Control**
|
||||
|
||||
It will launch the session in AnyDesk.
|
||||
34
docs/docs/3rdparty_bitdefender_gravityzone.md
Normal file
@@ -0,0 +1,34 @@
|
||||
# BitDefender GravityZone Deployment
|
||||
|
||||
## How to Deploy BitDefender GravityZone
|
||||
|
||||
From the UI go to **Settings > Global Settings > CUSTOM FIELDS > Clients**
|
||||
|
||||
Add a Custom Field</br>
|
||||
|
||||
First: </br>
|
||||
**Target** = `CLIENTS`</br>
|
||||
**Name** = `bdurl`</br>
|
||||
**Field Type** = `Text`</br>
|
||||
|
||||

|
||||
|
||||
Log into your GravityZone and on the left hand side, select "Packages" under "Network".
|
||||
|
||||

|
||||
|
||||
Select the client you are working with and click "Send Download Links" at the top. </br>
|
||||
|
||||

|
||||
|
||||
Copy the appropriate download link
|
||||
|
||||

|
||||
|
||||
Paste the download link into the `bdurl` custom field when you right-click your target client's name in the RMM.
|
||||
|
||||

|
||||
|
||||
Right click the Agent you want to deploy to and **Run Script**. Select **BitDefender GravityZone Install** and set timeout for 1800 seconds.
|
||||
|
||||
**Install time will vary based on internet speed and the removal of other AV products by the BitDefender BEST deployment.**
|
||||
9
docs/docs/3rdparty_grafana.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# Adding Grafana to Tactical RMM
|
||||
|
||||
Adding graphical Dashboards to Tactical.
|
||||
|
||||
See <https://github.com/dinger1986/TRMM-Grafana>
|
||||
|
||||

|
||||
|
||||

|
||||
46
docs/docs/3rdparty_teamviewer.md
Normal file
@@ -0,0 +1,46 @@
|
||||
# TeamViewer
|
||||
|
||||
## TeamViewer Integration
|
||||
|
||||
!!!info
|
||||
You can set up a full automation policy to collect the machine GUID, but this example will collect from just one agent for testing purposes.
|
||||
|
||||
From the UI go to **Settings > Global Settings > CUSTOM FIELDS > Agents**
|
||||
|
||||
Add Custom Field</br>
|
||||
**Target** = `Agent`</br>
|
||||
**Name** = `TeamViewerClientID`</br>
|
||||
**Field Type** = `Text`</br>
|
||||
|
||||

|
||||
|
||||
While in Global Settings go to **URL ACTIONS**
|
||||
|
||||
Add a URL Action</br>
|
||||
**Name** = `TeamViewer Control`</br>
|
||||
**Description** = `Connect to a Team Viewer Session`</br>
|
||||
**URL Pattern** =
|
||||
|
||||
```html
|
||||
https://start.teamviewer.com/device/{{agent.TeamViewerClientID}}/authorization/password/mode/control
|
||||
```
|
||||
|
||||
Navigate to an agent with TeamViewer running (or apply using **Settings > Automation Manager**).</br>
|
||||
Go to Tasks.</br>
|
||||
Add Task</br>
|
||||
**Select Script** = `TeamViewer - Get ClientID for client` (this is a builtin script from script library)</br>
|
||||
**Descriptive name of task** = `Collects the ClientID for TeamViewer.`</br>
|
||||
**Collector Task** = `CHECKED`</br>
|
||||
**Custom Field to update** = `TeamViewerClientID`</br>
|
||||
|
||||

|
||||
|
||||
Click **Next**</br>
|
||||
Check **Manual**</br>
|
||||
Click **Add Task**
|
||||
|
||||
Right click on the newly created task and click **Run Task Now**.
|
||||
|
||||
Give it a second to execute, then right-click the agent that you are working with and go to **Run URL Action > TeamViewer Control**
|
||||
|
||||
It will launch the session and may prompt for a password in TeamViewer.
|
||||
@@ -24,6 +24,18 @@ This is better
|
||||
|
||||

|
||||
|
||||
## Install VSCode Extensions
|
||||
|
||||
[Remote - Containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers)
|
||||
|
||||
[Docker](https://marketplace.visualstudio.com/items?itemName=ms-azuretools.vscode-docker)
|
||||
|
||||
## Connect to WSL and clone your Github fork
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
## Create .env file
|
||||
|
||||
Under .devcontainer duplicate
|
||||
@@ -46,7 +58,20 @@ Customize to your tastes (it doesn't need to be internet configured, just add re
|
||||
127.0.0.1 mesh.example.com
|
||||
```
|
||||
|
||||
## View mkdocks live edits in browser
|
||||
## Launch your Dev VM in Docker
|
||||
|
||||
Right-click `docker-compose.yml` and choose `Compose Up`
|
||||
|
||||
Wait, it'll take a while as docker downloads all the modules and gets running.
|
||||
|
||||
## Develop!
|
||||
|
||||
You're operational!
|
||||
|
||||
!!!note
|
||||
Self-signed certs are used in your dev environment. Navigate to https://api.example.com and https://rmm.example.com and accept the self-signed certs to get rid of errors.
|
||||
|
||||
### View mkdocs live edits in browser
|
||||
|
||||
Change stuff in `/docs/docs/`
|
||||
|
||||
@@ -54,6 +79,7 @@ mkdocs is Exposed on Port: 8005
|
||||
|
||||
Open: [http://rmm.example.com:8005/](http://rmm.example.com:8005/)
|
||||
|
||||
## View django administration
|
||||
### View django administration
|
||||
|
||||
Open: [http://rmm.example.com:8000/admin/](http://rmm.example.com:8000/admin/)
|
||||
|
||||
Open: [http://rmm.example.com:8000/admin/](http://rmm.example.com:8000/admin/)
|
||||
@@ -40,4 +40,30 @@ cd /meshcentral/
|
||||
sudo systemctl stop meshcentral
|
||||
node node_modules/meshcentral --resetaccount <username> --pass <newpassword>
|
||||
sudo systemctl start meshcentral
|
||||
```
|
||||
```
|
||||
|
||||
#### Help! I've been hacked, there are weird agents appearing in my Tactical RMM
|
||||
|
||||
No, you haven't.
|
||||
|
||||
1. Your installer was scanned by an antivirus.
|
||||
|
||||
2. It didn't recognize the exe.
|
||||
|
||||
3. You have the option enabled to submit unknown applications for analysis.
|
||||
|
||||

|
||||
|
||||
4. They ran it against their virtualization testing cluster.
|
||||
|
||||
5. You allow anyone to connect to your rmm server (you should look into techniques to hide your server from the internet).
|
||||
|
||||
6. Here are some examples of what that looks like.
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||

|
||||
|
||||

|
||||
@@ -3,11 +3,13 @@
|
||||
Alerting and notifications can be managed centrally using Alert Templates. All an alert template does is configure the Email, Text and Dashboard alert check boxes on Agents, Checks, and Automated Tasks.
|
||||
|
||||
Using Alert Templates also enables additional features like:
|
||||
- Periodic notifications if an alert is left unresolved
|
||||
|
||||
- Periodic notifications if an alert is left unresolved
|
||||
- Being able to notify on certain alert severities
|
||||
- Sending notifications when an alert is resolved
|
||||
- Executing scripts when an alert is triggered or resolved
|
||||
|
||||
[Setting up Email Alert Examples](email_alert.md)
|
||||
## Supported Notifications
|
||||
|
||||
- **Email Alerts** - Sends email to configured set of email addresses
|
||||
@@ -25,7 +27,6 @@ Alert severities are configured directly on the Check or Automated Task. When th
|
||||
- Warning
|
||||
- Error
|
||||
|
||||
|
||||
## Adding Alert Templates
|
||||
|
||||
To create an alert template, go to **Settings > Alerts Manager**. Then click **New**
|
||||
@@ -33,12 +34,14 @@ To create an alert template, go to **Settings > Alerts Manager**. Then click **N
|
||||
The available options are:
|
||||
|
||||
### General Settings
|
||||
|
||||
- **Name** - The name that is used to identify the Alert Template in the dashboard
|
||||
- **Email Recipients** - Sets the list of email recipients. If this isn't set, the email recipients from global settings will be used.
|
||||
- **From Email** - Sets the From email address of the notification. If this isn't set, the From address from global settings is used.
|
||||
- **SMS Recipients** - Sets the list of text recipients. If this isn't set, the SMS list from global settings is used.
|
||||
|
||||
### Action Settings
|
||||
|
||||
- **Failure Action** - Runs the selected script once on any agent. This is useful for running one-time tasks like sending an http request to an external system to create a ticket.
|
||||
- **Failure Action Args** - Optionally pass in arguments to the failure script.
|
||||
- **Failure Action Timeout** - Sets the timeout for the script.
|
||||
@@ -46,17 +49,25 @@ The available options are:
|
||||
- **Resolved Action Args** - Optionally pass in arguments to the resolved script.
|
||||
- **Resolved Action Timeout** - Sets the timeout for the script.
|
||||
|
||||
#### Run actions only on:
|
||||
- **Agents** - If Enabled, will run script failure/resolved actions on agent overdue alerts; otherwise no alert actions will be triggered for agent overdue alerts
|
||||
- **Checks** - If Enabled, will run script failure/resolved actions on check alerts; otherwise no alert actions will be triggered for check alerts
|
||||
- **Tasks** - If Enabled, will run script failure/resolved actions on automated task alerts; otherwise no alert actions will be triggered for automated task alerts
|
||||
|
||||
|
||||
### Agent/Check/Task Failure Settings
|
||||
|
||||
- **Email** - When **Enabled**, will send an email notification and override the Email Alert checkbox on the Agent/Check/Task. When **Not Configured**, the Email Alert checkbox on the Agent/Check/Task will take effect. If **Disabled**, no email notifications will be sent and will override any Email alert checkbox on the Agent/Check/Task
|
||||
- **Text** - When **Enabled**, will send a text notification and override the SMS Alert checkbox on the Agent/Check/Task. When **Not Configured**, the SMS Alert checkbox on the Agent/Check/Task will take effect. If **Disabled**, no SMS notifications will be sent and will override any SMS Alert checkbox on the Agent/Check/Task
|
||||
- **Dashboard** - When **Enabled**, will send a dashboard notification and override the Dashboard Alert checkbox on the Agent/Check/Task. When **Not Configured**, the Dashboard Alert checkbox on the Agent/Check/Task will take effect. If **Disabled**, no dashboard notifications will be sent and will override any Dashboard Alert checkbox on the Agent/Check/Task
|
||||
- **Alert again if not resolved after (days)** - This sends another notification if the alert isn't resolved after the set number of days. Set to 0 to disable this.
|
||||
- **Alert on severity** - Only applicable to Check and Task alert notifications. This will only send alerts when they are of the configured severity.
|
||||
- **Alert on severity** - Only applicable to Check and Task alert notifications. This will only send alerts when they are of the configured severity.
|
||||
|
||||
!!!info
|
||||
Alert on Severity needs to be configured for check and task notifications to be sent!
|
||||
|
||||
### Agent/Check/Task Resolved Settings
|
||||
|
||||
- **Email** - If enabled, sends an email notification when an alert is resolved
|
||||
- **Text** - If enabled, sends a text messaged when an alert is resolved
|
||||
|
||||
@@ -70,7 +81,7 @@ Alert templates can be configured Globally, through an Automation Policy, or set
|
||||
|
||||
## Alert Template Exclusions
|
||||
|
||||
You can exclude Clients, Sites, and Agents from alert templates. To do this you can:
|
||||
You can exclude Clients, Sites, and Agents from alert templates. To do this you can:
|
||||
|
||||
- right-click on the **Alert Template** in **Alerts Manager** and select **Exclusions**
|
||||
- select the **Alert Exclusions** link in the Alert Template row.
|
||||
@@ -79,7 +90,7 @@ You can also **Exclude Desktops** from the alert template. This is useful if you
|
||||
|
||||
## Alert Template inheritance
|
||||
|
||||
Alerts are applied in the following over. The agent picks the closest matching alert template.
|
||||
Alerts are applied in the following order. The agent picks the closest matching alert template.
|
||||
|
||||
1. Policy w/ Alert Template applied to Site
|
||||
2. Site
|
||||
|
||||
17
docs/docs/functions/database_maintenance.md
Normal file
@@ -0,0 +1,17 @@
|
||||
# Database Maintenance
|
||||
|
||||
Tactical RMM ships with data retention defaults that will work fine for most environments. There are situations, depending on the number of agents and checks configured, where these defaults need to be tweaked to improve performance.
|
||||
|
||||
## Adjusting Data Retention
|
||||
|
||||
In the dashboard, go to **Settings > Global Settings > Retention**
|
||||
|
||||
The options are:
|
||||
|
||||
- **Check History** - Will delete check history older than the days specified (default is 30 days).
|
||||
- **Resolved Alerts** - Will delete alerts that have been resolved older than the days specified (default is disabled).
|
||||
- **Agent History** - Will delete agent command/script history older than the days specified (default is 60 days).
|
||||
- **Debug Logs** - Will delete agent debug logs older than the days specified (default is 30 days)
|
||||
- **Audit Logs** - Will delete Tactical RMM audit logs older than the days specified (default is disabled)
|
||||
|
||||
To disable database pruning on a table, set the days to 0.
|
||||
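For a sense of what this pruning amounts to under the hood, here is a hedged Django ORM sketch that removes check history older than a configured number of days. The `CheckHistory` import, its `created_time` field, and the `prune_check_history` helper are assumptions for illustration only and may not match Tactical RMM's actual model, field, or task names.

```python
from datetime import timedelta

from django.utils import timezone

# Assumed model/field names for this sketch only; the real models may differ.
from checks.models import CheckHistory


def prune_check_history(older_than_days: int) -> int:
    """Delete check history rows older than the given number of days.

    Follows the dashboard convention above: 0 means pruning is disabled.
    Returns the number of rows deleted.
    """
    if older_than_days <= 0:
        return 0

    cutoff = timezone.now() - timedelta(days=older_than_days)
    deleted, _ = CheckHistory.objects.filter(created_time__lt=cutoff).delete()
    return deleted
```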
46
docs/docs/functions/email_alert.md
Normal file
@@ -0,0 +1,46 @@
|
||||
# Email Setup
|
||||
|
||||
Under **Settings > Global Settings > Email Alerts**
|
||||
|
||||
## Setting up Tactical RMM Alerts using Open Relay
|
||||
|
||||
MS 365 in this example
|
||||
|
||||
1. Log into Tactical RMM
|
||||
2. Go to Settings
|
||||
3. Go to Global Settings
|
||||
4. Click on Alerts
|
||||
5. Enter the email address (or addresses) you want to receive alerts to eg info@mydomain.com
|
||||
6. Enter the from email address (this will need to be part of your domain on 365, however it doesn’t need a license) eg rmm@mydomain.com
|
||||
7. Go to MXToolbox.com and enter your domain name, copy the hostname from there, and paste it into Host
|
||||
8. Change the port to 25
|
||||
9. Click Save
|
||||
10. Login to admin.microsoft.com
|
||||
11. Go to Exchange Admin Centre
|
||||
12. Go to “Connectors” under “Mail Flow”
|
||||
13. Click the + button
|
||||
14. In From: select “Your organisations email server”
|
||||
15. In To: select “Office 365”
|
||||
16. Click Next
|
||||
17. In the Name field, type in RMM
|
||||
18. Click By Verifying that the IP address……
|
||||
19. Click +
|
||||
20. Enter your IP and Click OK
|
||||
21. Click Next
|
||||
22. Click OK
|
||||
|
||||
## Setting up Tactical RMM Alerts using username & password
|
||||
|
||||
Gmail in this example
|
||||
|
||||
1. Log into Tactical RMM
|
||||
2. Go to Settings
|
||||
3. Go to Global Settings
|
||||
4. Click on Alerts
|
||||
5. Enter the email address (or addresses) you want to receive alerts to eg info@mydomain.com
|
||||
6. Enter the from email address myrmm@gmail.com
|
||||
7. Tick the box “My server requires Authentication”
|
||||
8. Enter your username e.g. myrmm@gmail.com
|
||||
9. Enter your password
|
||||
10. Change the port to 587
|
||||
11. Click Save
|
||||
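If you want to confirm the relay or credentials outside of Tactical RMM, the two setups above boil down to the difference sketched below: port 25 with no authentication for the open relay, versus STARTTLS plus a login on port 587. This is an illustrative sketch only; the hostnames, addresses, and the `send_test` helper are placeholders, not values from your environment or from Tactical RMM itself.

```python
import smtplib
from email.message import EmailMessage
from typing import Optional


def send_test(host: str, port: int, username: Optional[str] = None, password: Optional[str] = None) -> None:
    """Send a one-line test mail using the same settings the dashboard asks for."""
    msg = EmailMessage()
    msg["Subject"] = "Tactical RMM alert test"
    msg["From"] = "rmm@mydomain.com"   # placeholder: the From address configured above
    msg["To"] = "info@mydomain.com"    # placeholder: the alert recipient configured above
    msg.set_content("If you can read this, the SMTP settings work.")

    with smtplib.SMTP(host, port, timeout=15) as smtp:
        if username and password:
            smtp.starttls()            # authenticated setup, e.g. Gmail on 587
            smtp.login(username, password)
        # the open-relay setup (e.g. an MS 365 connector on port 25) sends without auth
        smtp.send_message(msg)


# Open relay example:    send_test("mydomain-com.mail.protection.outlook.com", 25)
# Authenticated example: send_test("smtp.gmail.com", 587, "myrmm@gmail.com", "app-password")
```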
@@ -24,7 +24,7 @@ In the dashboard, browse to **Settings > Scripts Manager**. Click the **New** bu
|
||||
|
||||
To download a Tactical RMM Script, click on the script in the Script Manager to select it. Then click the **Download Script** button at the top. You can also right-click on the script and select download.
|
||||
|
||||
## Community Script
|
||||
## Community Scripts
|
||||
|
||||
These are scripts that are built into Tactical RMM. They are provided and maintained by the Tactical RMM community. These scripts are updated whenever Tactical RMM is updated and can't be modified or deleted in the dashboard.
|
||||
|
||||
@@ -36,9 +36,12 @@ You can choose to hide community script throughout the dashboard by opening **Sc
|
||||
### Manual run on agent
|
||||
|
||||
In the **Agent Table**, you can right-click on an agent and select **Run Script**. You have the following options:
|
||||
- **Wait for Output** - Runs the script and waits for the script to finish running and displays the output.
|
||||
- **Fire and Forget** - Starts the script and does not wait for output.
|
||||
- **Email Output** - Starts the script and will email the output. Allows for using the default email address in the global settings or adding a new email address.
|
||||
|
||||
- **Wait for Output** - Runs the script and waits for the script to finish running and displays the output.
|
||||
- **Fire and Forget** - Starts the script and does not wait for output.
|
||||
- **Email Output** - Starts the script and will email the output. Allows for using the default email address in the global settings or adding a new email address.
|
||||
- **Save as Note** - Saves the output as a Note that can be viewed in the agent Notes tab.
|
||||
- **Collector** - Saves the output to the specified custom field.
|
||||
|
||||
There is also an option on the agent context menu called **Run Favorited Script**. This will essentially Fire and Forget the script with default args and timeout.
|
||||
|
||||
@@ -108,3 +111,27 @@ Write-Output "Public IP: $PublicIp"
|
||||
Write-Output "Custom Fields: $CustomField"
|
||||
Write-Output "Global: $Global"
|
||||
```
|
||||
|
||||
## Script Snippets
|
||||
|
||||
Script Snippets allow you to create common code blocks or comments and apply them to all of your scripts. This could be initialization code, common error checking, or even code comments.
|
||||
|
||||
### Adding Script Snippets
|
||||
|
||||
In the dashboard, browse to **Settings > Scripts Manager**. Click the **Script Snippets** button.
|
||||
|
||||
- **Name** - This identifies the script snippet in the dashboard
|
||||
- **Description** - Optional description for the script snippet
|
||||
- **Shell** - This sets the language of the script. Available options are:
|
||||
- Powershell
|
||||
- Windows Batch
|
||||
- Python
|
||||
|
||||
### Using Script Snippets
|
||||
|
||||
When editing a script, you can add template tags to the script body that contain the script snippet name. For example, if a script snippet exists with the name "Check WMF", you would put {{Check WMF}} in the script body and it will be replaced with the snippet code.
|
||||
|
||||
!!!info
|
||||
Everything between {{}} is CaSe sEnSiTive
|
||||
|
||||
The template tags will only be visible when editing the script. When downloading or viewing the script code, the template tags will be replaced with the script snippet code.
|
||||
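To make the replacement concrete, here is a minimal Python sketch of how {{Name}} tags could be expanded into snippet code. It is an illustration only, not Tactical RMM's actual `Script.replace_with_snippets` implementation; the `SNIPPETS` lookup table, the `expand_snippets` name, and the fallback of leaving unknown tags in place are assumptions made for the example.

```python
import re

# Hypothetical snippet store: name -> code. Lookups are case sensitive,
# matching the dashboard behaviour described above.
SNIPPETS = {
    "Check WMF": 'if ($PSVersionTable.PSVersion.Major -lt 5) { Write-Output "WMF too old" }',
}


def expand_snippets(code: str) -> str:
    """Replace {{snippet name}} tags in a script body with the snippet's code."""

    def repl(match: re.Match) -> str:
        name = match.group(1).strip()
        # Assumption for this sketch: unknown tags are left untouched so the
        # missing snippet stays visible in the rendered script.
        return SNIPPETS.get(name, match.group(0))

    return re.sub(r"\{\{(.+?)\}\}", repl, code)


if __name__ == "__main__":
    body = "# Init\n{{Check WMF}}\nWrite-Output 'main work here'"
    print(expand_snippets(body))
```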
@@ -1,6 +1,14 @@
|
||||
# How It All Works
|
||||
|
||||
INSERT WIREFRAME GRAPHIC HERE USING <https://www.yworks.com/yed-live/>
|
||||
INSERT WIREFRAME GRAPHICS HERE USING SOMETHING LIKE <https://www.yworks.com/yed-live/>
|
||||
|
||||
1) how nats-django-admin web interface work
|
||||
|
||||
2) Agent installer steps
|
||||
|
||||
3) Agent communication process with server (what ports to which services etc)
|
||||
|
||||
4) Agent checks/tasks and how they work on the workstation/interact with server
|
||||
|
||||
## Server
|
||||
|
||||
@@ -130,7 +138,7 @@ Executes the file (INNO setup exe)
|
||||
|
||||
Files create `c:\Windows\temp\Tacticalxxxx\` folder for install (and log files)
|
||||
|
||||
*****
|
||||
***
|
||||
|
||||
### Windows Update Management
|
||||
|
||||
@@ -142,4 +150,14 @@ AUOptions (REG_DWORD):
|
||||
1: Keep my computer up to date is disabled in Automatic Updates.
|
||||
```
|
||||
|
||||
Uses this Microsoft API to handle updates: [https://docs.microsoft.com/en-us/windows/win32/api/_wua/](https://docs.microsoft.com/en-us/windows/win32/api/_wua/)
|
||||
Uses this Microsoft API to handle updates: [https://docs.microsoft.com/en-us/windows/win32/api/_wua/](https://docs.microsoft.com/en-us/windows/win32/api/_wua/)
|
||||
|
||||
### Log files
|
||||
|
||||
You can find 3 sets of detailed logs at `/rmm/api/tacticalrmm/tacticalrmm/private/log`
|
||||
|
||||
* `error.log` nginx log for all errors on all TRMM URLs: rmm, api and mesh
|
||||
|
||||
* `access.log` nginx log for access auditing on all URLs: rmm, api and mesh (_this is a large file, and should be cleaned periodically_)
|
||||
|
||||
* `django_debug.log` created by django webapp
|
||||
|
||||
|
After Width: | Height: | Size: 57 KiB |
BIN
docs/docs/images/3rdparty_anydesk1.png
Normal file
|
After Width: | Height: | Size: 35 KiB |
BIN
docs/docs/images/3rdparty_anydesk2.png
Normal file
|
After Width: | Height: | Size: 43 KiB |
BIN
docs/docs/images/3rdparty_bdg_CustFieldLink.png
Normal file
|
After Width: | Height: | Size: 32 KiB |
BIN
docs/docs/images/3rdparty_bdg_DownloadLink.png
Normal file
|
After Width: | Height: | Size: 5.2 KiB |
BIN
docs/docs/images/3rdparty_bdg_LinkCopy.png
Normal file
|
After Width: | Height: | Size: 26 KiB |
BIN
docs/docs/images/3rdparty_bdg_Packages.png
Normal file
|
After Width: | Height: | Size: 30 KiB |
BIN
docs/docs/images/3rdparty_bdg_RmmCustField.png
Normal file
|
After Width: | Height: | Size: 36 KiB |