Compare commits
267 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4942f262f1 | ||
|
|
a20b1a973e | ||
|
|
eae5e00706 | ||
|
|
5c92d4b454 | ||
|
|
38179b9d38 | ||
|
|
8f510dde5a | ||
|
|
be42d56e37 | ||
|
|
c5c8f5fab1 | ||
|
|
3d41d79078 | ||
|
|
3005061a11 | ||
|
|
65ea46f457 | ||
|
|
eca8f32570 | ||
|
|
8d1ef19c61 | ||
|
|
71d87d866b | ||
|
|
c4f88bdce7 | ||
|
|
f722a115b1 | ||
|
|
1583beea7b | ||
|
|
5b388c587b | ||
|
|
e254923167 | ||
|
|
b0dbdd7803 | ||
|
|
aa6ebe0122 | ||
|
|
c5f179bab8 | ||
|
|
e65cb86638 | ||
|
|
a349998640 | ||
|
|
43f60610b8 | ||
|
|
46d042087a | ||
|
|
ee214727f6 | ||
|
|
b4c1ec55ec | ||
|
|
0fdd54f710 | ||
|
|
4f0cdeaec0 | ||
|
|
e5cc38857c | ||
|
|
fe4b9d71c0 | ||
|
|
5c1181e40e | ||
|
|
8b71832bc2 | ||
|
|
8412ed6065 | ||
|
|
207f6cdc7c | ||
|
|
b0b51f5730 | ||
|
|
def6833ef0 | ||
|
|
c528dd3de1 | ||
|
|
544270e35d | ||
|
|
657e029fee | ||
|
|
49469d7689 | ||
|
|
4f0dd452c8 | ||
|
|
3f741eab11 | ||
|
|
190368788f | ||
|
|
8306a3f566 | ||
|
|
988c134c09 | ||
|
|
af0a4d578b | ||
|
|
9bc0abc831 | ||
|
|
41410e99e7 | ||
|
|
deae04d5ff | ||
|
|
7d6eeffd66 | ||
|
|
629858e095 | ||
|
|
dfdb628347 | ||
|
|
6e48b28fc9 | ||
|
|
3ba450e837 | ||
|
|
688ed93500 | ||
|
|
7268ba20a2 | ||
|
|
63d9e73098 | ||
|
|
564c048f90 | ||
|
|
5f801c74d5 | ||
|
|
b405fbc09a | ||
|
|
7a64c2eb49 | ||
|
|
c93cbac3b1 | ||
|
|
8b0f67b8a6 | ||
|
|
0d96129f2d | ||
|
|
54ee12d2b3 | ||
|
|
92fc042103 | ||
|
|
9bb7016fa7 | ||
|
|
3ad56feafb | ||
|
|
14d59c3dec | ||
|
|
443f419770 | ||
|
|
ddbb58755e | ||
|
|
524283b9ff | ||
|
|
fb178d2944 | ||
|
|
52f4ad9403 | ||
|
|
ba0c08ef1f | ||
|
|
9e19b1e04c | ||
|
|
b2118201b1 | ||
|
|
b4346aa056 | ||
|
|
b599f05aab | ||
|
|
93d78a0200 | ||
|
|
449957b2eb | ||
|
|
0a6d44bad3 | ||
|
|
17ceaaa503 | ||
|
|
d70803b416 | ||
|
|
aa414d4702 | ||
|
|
f24e1b91ea | ||
|
|
1df8163090 | ||
|
|
659ddf6a45 | ||
|
|
e110068da4 | ||
|
|
c943f6f936 | ||
|
|
cb1fe7fe54 | ||
|
|
593f1f63cc | ||
|
|
66aa70cf75 | ||
|
|
304be99067 | ||
|
|
9a01ec35f4 | ||
|
|
bfa5b4fba5 | ||
|
|
d2f63ef353 | ||
|
|
50f334425e | ||
|
|
f78212073c | ||
|
|
5c655f5a82 | ||
|
|
6a6446bfcb | ||
|
|
b60a3a5e50 | ||
|
|
02ccbab8e5 | ||
|
|
023ff3f964 | ||
|
|
7c5e8df3b8 | ||
|
|
56fdab260b | ||
|
|
7cce49dc1a | ||
|
|
2dfaafb20b | ||
|
|
6138a5bf54 | ||
|
|
828c67cc00 | ||
|
|
e70cd44e18 | ||
|
|
efa5ac5edd | ||
|
|
788b11e759 | ||
|
|
d049d7a61f | ||
|
|
075c833b58 | ||
|
|
e9309c2a96 | ||
|
|
a592d2b397 | ||
|
|
3ad1805ac0 | ||
|
|
dbc2bab698 | ||
|
|
79eec5c299 | ||
|
|
7754b0c575 | ||
|
|
be4289ce76 | ||
|
|
67f5226270 | ||
|
|
b6d77c581b | ||
|
|
d84bf47d04 | ||
|
|
aba3a7bb9e | ||
|
|
6281736d89 | ||
|
|
94d96f89d3 | ||
|
|
4b55f9dead | ||
|
|
5c6dce94df | ||
|
|
f7d8f9c7f5 | ||
|
|
053df24f9c | ||
|
|
1dc470e434 | ||
|
|
cfd8773267 | ||
|
|
67045cf6c1 | ||
|
|
ddfb9e7239 | ||
|
|
9f6eed5472 | ||
|
|
15a1e2ebcb | ||
|
|
fcfe450b07 | ||
|
|
a69bbb3bc9 | ||
|
|
6d2559cfc1 | ||
|
|
b3a62615f3 | ||
|
|
57f5cca1cb | ||
|
|
6b9851f540 | ||
|
|
36fd203a88 | ||
|
|
3f5cb5d61c | ||
|
|
862fc6a946 | ||
|
|
92c386ac0e | ||
|
|
98a11a3645 | ||
|
|
62be0ed936 | ||
|
|
b7de73fd8a | ||
|
|
e2413f1af2 | ||
|
|
0e77d575c4 | ||
|
|
ba42c5e367 | ||
|
|
6a06734192 | ||
|
|
5e26a406b7 | ||
|
|
b6dd03138d | ||
|
|
cf03ee03ee | ||
|
|
0e665b6bf0 | ||
|
|
e3d0de7313 | ||
|
|
bcf3a543a1 | ||
|
|
b27f17c74a | ||
|
|
75d864771e | ||
|
|
6420060f2a | ||
|
|
c149ae71b9 | ||
|
|
3a49dd034c | ||
|
|
b26d7e82e3 | ||
|
|
415abdf0ce | ||
|
|
f7f6f6ecb2 | ||
|
|
43d54f134a | ||
|
|
0d2606a13b | ||
|
|
1deb10dc88 | ||
|
|
1236d55544 | ||
|
|
ecccf39455 | ||
|
|
8e0316825a | ||
|
|
aa45fa87af | ||
|
|
71e78bd0c5 | ||
|
|
4766477c58 | ||
|
|
d97e49ff2b | ||
|
|
6b9d775cb9 | ||
|
|
e521f580d7 | ||
|
|
25e7cf7db0 | ||
|
|
0cab33787d | ||
|
|
bc6faf817f | ||
|
|
d46ae55863 | ||
|
|
bbd900ab25 | ||
|
|
129ae93e2b | ||
|
|
44dd59fa3f | ||
|
|
ec4e7559b0 | ||
|
|
dce40611cf | ||
|
|
e71b8546f9 | ||
|
|
f827348467 | ||
|
|
f3978343db | ||
|
|
2654a7ea70 | ||
|
|
1068bf4ef7 | ||
|
|
e7fccc97cc | ||
|
|
733e289852 | ||
|
|
29d71a104c | ||
|
|
05200420ad | ||
|
|
eb762d4bfd | ||
|
|
58ace9eda1 | ||
|
|
eeb2623be0 | ||
|
|
cfa242c2fe | ||
|
|
ec0441ccc2 | ||
|
|
ae2782a8fe | ||
|
|
58ff570251 | ||
|
|
7b554b12c7 | ||
|
|
58f7603d4f | ||
|
|
8895994c54 | ||
|
|
de8f7e36d5 | ||
|
|
88d7a50265 | ||
|
|
21e19fc7e5 | ||
|
|
faf4935a69 | ||
|
|
71a1f9d74a | ||
|
|
bd8d523e10 | ||
|
|
60cae0e3ac | ||
|
|
5a342ac012 | ||
|
|
bb8767dfc3 | ||
|
|
fcb2779c15 | ||
|
|
77dd6c1f61 | ||
|
|
8118eef300 | ||
|
|
802d1489fe | ||
|
|
443a029185 | ||
|
|
4ee508fdd0 | ||
|
|
aa5608f7e8 | ||
|
|
cc472b4613 | ||
|
|
764b945ddc | ||
|
|
fd2206ce4c | ||
|
|
48c0ac9f00 | ||
|
|
84eb4fe9ed | ||
|
|
4a5428812c | ||
|
|
023f98a89d | ||
|
|
66893dd0c1 | ||
|
|
25a6666e35 | ||
|
|
19d75309b5 | ||
|
|
11110d65c1 | ||
|
|
a348f58fe2 | ||
|
|
13851dd976 | ||
|
|
2ec37c5da9 | ||
|
|
8c127160de | ||
|
|
2af820de9a | ||
|
|
55fb0bb3a0 | ||
|
|
9f9ecc521f | ||
|
|
dfd01df5ba | ||
|
|
474090698c | ||
|
|
6b71cdeea4 | ||
|
|
581e974236 | ||
|
|
ba3c3a42ce | ||
|
|
c8bc5671c5 | ||
|
|
ff9401a040 | ||
|
|
5e1bc1989f | ||
|
|
a1dc91cd7d | ||
|
|
99f2772bb3 | ||
|
|
e5d0e42655 | ||
|
|
2c914cc374 | ||
|
|
9bceb62381 | ||
|
|
de7518a800 | ||
|
|
304fb63453 | ||
|
|
0f7ef60ca0 | ||
|
|
07c74e4641 | ||
|
|
de7f325cfb | ||
|
|
42cdf70cb4 | ||
|
|
6beb6be131 | ||
|
|
fa4fc2a708 | ||
|
|
2db9758260 |
@@ -26,3 +26,6 @@ POSTGRES_PASS=postgrespass
|
||||
APP_PORT=80
|
||||
API_PORT=80
|
||||
HTTP_PROTOCOL=https
|
||||
DOCKER_NETWORK=172.21.0.0/24
|
||||
DOCKER_NGINX_IP=172.21.0.20
|
||||
NATS_PORTS=4222:4222
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM python:3.9.2-slim
|
||||
FROM python:3.9.6-slim
|
||||
|
||||
ENV TACTICAL_DIR /opt/tactical
|
||||
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
|
||||
@@ -13,12 +13,17 @@ EXPOSE 8000 8383 8005
|
||||
RUN groupadd -g 1000 tactical && \
|
||||
useradd -u 1000 -g 1000 tactical
|
||||
|
||||
# Copy Dev python reqs
|
||||
COPY ./requirements.txt /
|
||||
# Copy nats-api file
|
||||
COPY natsapi/bin/nats-api /usr/local/bin/
|
||||
RUN chmod +x /usr/local/bin/nats-api
|
||||
|
||||
# Copy Docker Entrypoint
|
||||
COPY ./entrypoint.sh /
|
||||
# Copy dev python reqs
|
||||
COPY .devcontainer/requirements.txt /
|
||||
|
||||
# Copy docker entrypoint.sh
|
||||
COPY .devcontainer/entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
||||
|
||||
WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm
|
||||
|
||||
@@ -6,8 +6,8 @@ services:
|
||||
image: api-dev
|
||||
restart: always
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
context: ..
|
||||
dockerfile: .devcontainer/api.dockerfile
|
||||
command: ["tactical-api"]
|
||||
environment:
|
||||
API_PORT: ${API_PORT}
|
||||
@@ -46,7 +46,7 @@ services:
|
||||
API_PORT: ${API_PORT}
|
||||
DEV: 1
|
||||
ports:
|
||||
- "4222:4222"
|
||||
- "${NATS_PORTS}"
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
@@ -67,7 +67,7 @@ services:
|
||||
MESH_PASS: ${MESH_PASS}
|
||||
MONGODB_USER: ${MONGODB_USER}
|
||||
MONGODB_PASSWORD: ${MONGODB_PASSWORD}
|
||||
NGINX_HOST_IP: 172.21.0.20
|
||||
NGINX_HOST_IP: ${DOCKER_NGINX_IP}
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
@@ -127,9 +127,6 @@ services:
|
||||
init-dev:
|
||||
container_name: trmm-init-dev
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
restart: on-failure
|
||||
command: ["tactical-init-dev"]
|
||||
environment:
|
||||
@@ -156,9 +153,6 @@ services:
|
||||
celery-dev:
|
||||
container_name: trmm-celery-dev
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["tactical-celery-dev"]
|
||||
restart: always
|
||||
networks:
|
||||
@@ -174,9 +168,6 @@ services:
|
||||
celerybeat-dev:
|
||||
container_name: trmm-celerybeat-dev
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["tactical-celerybeat-dev"]
|
||||
restart: always
|
||||
networks:
|
||||
@@ -192,9 +183,6 @@ services:
|
||||
websockets-dev:
|
||||
container_name: trmm-websockets-dev
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["tactical-websockets-dev"]
|
||||
restart: always
|
||||
networks:
|
||||
@@ -223,7 +211,7 @@ services:
|
||||
API_PORT: ${API_PORT}
|
||||
networks:
|
||||
dev:
|
||||
ipv4_address: 172.21.0.20
|
||||
ipv4_address: ${DOCKER_NGINX_IP}
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
@@ -234,9 +222,6 @@ services:
|
||||
container_name: trmm-mkdocs-dev
|
||||
image: api-dev
|
||||
restart: always
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["tactical-mkdocs-dev"]
|
||||
ports:
|
||||
- "8005:8005"
|
||||
@@ -258,4 +243,4 @@ networks:
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: 172.21.0.0/24
|
||||
- subnet: ${DOCKER_NETWORK}
|
||||
|
||||
@@ -114,6 +114,7 @@ EOF
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py load_chocos
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py load_community_scripts
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py reload_nats
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py create_installer_user
|
||||
|
||||
# create super user
|
||||
echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
|
||||
|
||||
@@ -3,6 +3,7 @@ asyncio-nats-client
|
||||
celery
|
||||
channels
|
||||
channels_redis
|
||||
django-ipware
|
||||
Django
|
||||
django-cors-headers
|
||||
django-rest-knox
|
||||
|
||||
@@ -9,7 +9,7 @@ Tactical RMM is a remote monitoring & management tool for Windows computers, bui
|
||||
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
|
||||
|
||||
# [LIVE DEMO](https://rmm.tacticalrmm.io/)
|
||||
Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
|
||||
Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.
|
||||
|
||||
### [Discord Chat](https://discord.gg/upGTkWp)
|
||||
|
||||
@@ -35,4 +35,4 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
|
||||
|
||||
## Installation / Backup / Restore / Usage
|
||||
|
||||
### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)
|
||||
### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
import uuid
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from accounts.models import User
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Creates the installer user"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
if User.objects.filter(is_installer_user=True).exists():
|
||||
return
|
||||
|
||||
User.objects.create_user( # type: ignore
|
||||
username=uuid.uuid4().hex,
|
||||
is_installer_user=True,
|
||||
password=User.objects.make_random_password(60), # type: ignore
|
||||
)
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.4 on 2021-06-17 04:29
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0020_role_can_manage_roles'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_view_core_settings',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.4 on 2021-06-28 05:01
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0021_role_can_view_core_settings'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='clear_search_when_switching',
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.4 on 2021-06-30 03:22
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0022_user_clear_search_when_switching'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='is_installer_user',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-20 20:26
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0023_user_is_installer_user'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='last_login_ip',
|
||||
field=models.GenericIPAddressField(blank=True, default=None, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,33 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 04:24
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0024_user_last_login_ip'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='created_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='created_time',
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='modified_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='modified_time',
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -46,6 +46,9 @@ class User(AbstractUser, BaseAuditModel):
|
||||
)
|
||||
client_tree_splitter = models.PositiveIntegerField(default=11)
|
||||
loading_bar_color = models.CharField(max_length=255, default="red")
|
||||
clear_search_when_switching = models.BooleanField(default=True)
|
||||
is_installer_user = models.BooleanField(default=False)
|
||||
last_login_ip = models.GenericIPAddressField(default=None, blank=True, null=True)
|
||||
|
||||
agent = models.OneToOneField(
|
||||
"agents.Agent",
|
||||
@@ -71,7 +74,7 @@ class User(AbstractUser, BaseAuditModel):
|
||||
return UserSerializer(user).data
|
||||
|
||||
|
||||
class Role(models.Model):
|
||||
class Role(BaseAuditModel):
|
||||
name = models.CharField(max_length=255, unique=True)
|
||||
is_superuser = models.BooleanField(default=False)
|
||||
|
||||
@@ -90,6 +93,7 @@ class Role(models.Model):
|
||||
|
||||
# core
|
||||
can_manage_notes = models.BooleanField(default=False)
|
||||
can_view_core_settings = models.BooleanField(default=False)
|
||||
can_edit_core_settings = models.BooleanField(default=False)
|
||||
can_do_server_maint = models.BooleanField(default=False)
|
||||
can_code_sign = models.BooleanField(default=False)
|
||||
@@ -137,6 +141,13 @@ class Role(models.Model):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@staticmethod
|
||||
def serialize(role):
|
||||
# serializes the agent and returns json
|
||||
from .serializers import RoleAuditSerializer
|
||||
|
||||
return RoleAuditSerializer(role).data
|
||||
|
||||
@staticmethod
|
||||
def perms():
|
||||
return [
|
||||
@@ -153,6 +164,7 @@ class Role(models.Model):
|
||||
"can_run_scripts",
|
||||
"can_run_bulk",
|
||||
"can_manage_notes",
|
||||
"can_view_core_settings",
|
||||
"can_edit_core_settings",
|
||||
"can_do_server_maint",
|
||||
"can_code_sign",
|
||||
|
||||
@@ -16,6 +16,7 @@ class UserUISerializer(ModelSerializer):
|
||||
"client_tree_sort",
|
||||
"client_tree_splitter",
|
||||
"loading_bar_color",
|
||||
"clear_search_when_switching",
|
||||
]
|
||||
|
||||
|
||||
@@ -30,6 +31,7 @@ class UserSerializer(ModelSerializer):
|
||||
"email",
|
||||
"is_active",
|
||||
"last_login",
|
||||
"last_login_ip",
|
||||
"role",
|
||||
]
|
||||
|
||||
@@ -56,3 +58,9 @@ class RoleSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Role
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class RoleAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Role
|
||||
fields = "__all__"
|
||||
|
||||
@@ -280,6 +280,7 @@ class TestUserAction(TacticalTestCase):
|
||||
"client_tree_sort": "alpha",
|
||||
"client_tree_splitter": 14,
|
||||
"loading_bar_color": "green",
|
||||
"clear_search_when_switching": False,
|
||||
}
|
||||
r = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@@ -3,23 +3,23 @@ from django.conf import settings
|
||||
from django.contrib.auth import login
|
||||
from django.db import IntegrityError
|
||||
from django.shortcuts import get_object_or_404
|
||||
from ipware import get_client_ip
|
||||
from knox.views import LoginView as KnoxLoginView
|
||||
from logs.models import AuditLog
|
||||
from rest_framework import status
|
||||
from rest_framework.authtoken.serializers import AuthTokenSerializer
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from logs.models import AuditLog
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import User, Role
|
||||
from .models import Role, User
|
||||
from .permissions import AccountsPerms, RolesPerms
|
||||
from .serializers import (
|
||||
RoleSerializer,
|
||||
TOTPSetupSerializer,
|
||||
UserSerializer,
|
||||
UserUISerializer,
|
||||
RoleSerializer,
|
||||
)
|
||||
|
||||
|
||||
@@ -40,7 +40,9 @@ class CheckCreds(KnoxLoginView):
|
||||
# check credentials
|
||||
serializer = AuthTokenSerializer(data=request.data)
|
||||
if not serializer.is_valid():
|
||||
AuditLog.audit_user_failed_login(request.data["username"])
|
||||
AuditLog.audit_user_failed_login(
|
||||
request.data["username"], debug_info={"ip": request._client_ip}
|
||||
)
|
||||
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
user = serializer.validated_data["user"]
|
||||
@@ -76,10 +78,20 @@ class LoginView(KnoxLoginView):
|
||||
|
||||
if valid:
|
||||
login(request, user)
|
||||
AuditLog.audit_user_login_successful(request.data["username"])
|
||||
|
||||
# save ip information
|
||||
client_ip, is_routable = get_client_ip(request)
|
||||
user.last_login_ip = client_ip
|
||||
user.save()
|
||||
|
||||
AuditLog.audit_user_login_successful(
|
||||
request.data["username"], debug_info={"ip": request._client_ip}
|
||||
)
|
||||
return super(LoginView, self).post(request, format=None)
|
||||
else:
|
||||
AuditLog.audit_user_failed_twofactor(request.data["username"])
|
||||
AuditLog.audit_user_failed_twofactor(
|
||||
request.data["username"], debug_info={"ip": request._client_ip}
|
||||
)
|
||||
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
@@ -87,7 +99,14 @@ class GetAddUsers(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
|
||||
def get(self, request):
|
||||
users = User.objects.filter(agent=None)
|
||||
search = request.GET.get("search", None)
|
||||
|
||||
if search:
|
||||
users = User.objects.filter(agent=None, is_installer_user=False).filter(
|
||||
username__icontains=search
|
||||
)
|
||||
else:
|
||||
users = User.objects.filter(agent=None, is_installer_user=False)
|
||||
|
||||
return Response(UserSerializer(users, many=True).data)
|
||||
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import Agent, AgentCustomField, Note, RecoveryAction
|
||||
from .models import Agent, AgentCustomField, Note, RecoveryAction, AgentHistory
|
||||
|
||||
admin.site.register(Agent)
|
||||
admin.site.register(RecoveryAction)
|
||||
admin.site.register(Note)
|
||||
admin.site.register(AgentCustomField)
|
||||
admin.site.register(AgentHistory)
|
||||
|
||||
23
api/tacticalrmm/agents/migrations/0037_auto_20210627_0014.py
Normal file
23
api/tacticalrmm/agents/migrations/0037_auto_20210627_0014.py
Normal file
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.2.4 on 2021-06-27 00:14
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0036_agent_block_policy_inheritance'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='agent',
|
||||
name='has_patches_pending',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='agent',
|
||||
name='pending_actions_count',
|
||||
field=models.PositiveIntegerField(default=0),
|
||||
),
|
||||
]
|
||||
27
api/tacticalrmm/agents/migrations/0038_agenthistory.py
Normal file
27
api/tacticalrmm/agents/migrations/0038_agenthistory.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-06 02:01
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0037_auto_20210627_0014'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='AgentHistory',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('time', models.DateTimeField(auto_now_add=True)),
|
||||
('type', models.CharField(choices=[('task_run', 'Task Run'), ('script_run', 'Script Run'), ('cmd_run', 'CMD Run')], default='cmd_run', max_length=50)),
|
||||
('command', models.TextField(blank=True, null=True)),
|
||||
('status', models.CharField(choices=[('success', 'Success'), ('failure', 'Failure')], default='success', max_length=50)),
|
||||
('username', models.CharField(default='system', max_length=50)),
|
||||
('results', models.TextField(blank=True, null=True)),
|
||||
('agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='history', to='agents.agent')),
|
||||
],
|
||||
),
|
||||
]
|
||||
25
api/tacticalrmm/agents/migrations/0039_auto_20210714_0738.py
Normal file
25
api/tacticalrmm/agents/migrations/0039_auto_20210714_0738.py
Normal file
@@ -0,0 +1,25 @@
|
||||
# Generated by Django 3.2.5 on 2021-07-14 07:38
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('scripts', '0008_script_guid'),
|
||||
('agents', '0038_agenthistory'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='agenthistory',
|
||||
name='script',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='history', to='scripts.script'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='agenthistory',
|
||||
name='script_results',
|
||||
field=models.JSONField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -16,14 +16,12 @@ from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from nats.aio.client import Client as NATS
|
||||
from nats.aio.errors import ErrTimeout
|
||||
from packaging import version as pyver
|
||||
|
||||
from core.models import TZ_CHOICES, CoreSettings
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
|
||||
|
||||
class Agent(BaseAuditModel):
|
||||
@@ -64,6 +62,8 @@ class Agent(BaseAuditModel):
|
||||
)
|
||||
maintenance_mode = models.BooleanField(default=False)
|
||||
block_policy_inheritance = models.BooleanField(default=False)
|
||||
pending_actions_count = models.PositiveIntegerField(default=0)
|
||||
has_patches_pending = models.BooleanField(default=False)
|
||||
alert_template = models.ForeignKey(
|
||||
"alerts.AlertTemplate",
|
||||
related_name="agents",
|
||||
@@ -89,16 +89,18 @@ class Agent(BaseAuditModel):
|
||||
def save(self, *args, **kwargs):
|
||||
|
||||
# get old agent if exists
|
||||
old_agent = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
old_agent = Agent.objects.get(pk=self.pk) if self.pk else None
|
||||
super(Agent, self).save(old_model=old_agent, *args, **kwargs)
|
||||
|
||||
# check if new agent has been created
|
||||
# or check if policy have changed on agent
|
||||
# or if site has changed on agent and if so generate-policies
|
||||
# or if agent was changed from server or workstation
|
||||
if (
|
||||
not old_agent
|
||||
or (old_agent and old_agent.policy != self.policy)
|
||||
or (old_agent.site != self.site)
|
||||
or (old_agent.monitoring_type != self.monitoring_type)
|
||||
or (old_agent.block_policy_inheritance != self.block_policy_inheritance)
|
||||
):
|
||||
self.generate_checks_from_policies()
|
||||
@@ -119,7 +121,7 @@ class Agent(BaseAuditModel):
|
||||
else:
|
||||
from core.models import CoreSettings
|
||||
|
||||
return CoreSettings.objects.first().default_time_zone
|
||||
return CoreSettings.objects.first().default_time_zone # type: ignore
|
||||
|
||||
@property
|
||||
def arch(self):
|
||||
@@ -161,10 +163,6 @@ class Agent(BaseAuditModel):
|
||||
else:
|
||||
return "offline"
|
||||
|
||||
@property
|
||||
def has_patches_pending(self):
|
||||
return self.winupdates.filter(action="approve").filter(installed=False).exists() # type: ignore
|
||||
|
||||
@property
|
||||
def checks(self):
|
||||
total, passing, failing, warning, info = 0, 0, 0, 0, 0
|
||||
@@ -325,6 +323,7 @@ class Agent(BaseAuditModel):
|
||||
full: bool = False,
|
||||
wait: bool = False,
|
||||
run_on_any: bool = False,
|
||||
history_pk: int = 0,
|
||||
) -> Any:
|
||||
|
||||
from scripts.models import Script
|
||||
@@ -343,6 +342,9 @@ class Agent(BaseAuditModel):
|
||||
},
|
||||
}
|
||||
|
||||
if history_pk != 0 and pyver.parse(self.version) >= pyver.parse("1.6.0"):
|
||||
data["id"] = history_pk
|
||||
|
||||
running_agent = self
|
||||
if run_on_any:
|
||||
nats_ping = {"func": "ping"}
|
||||
@@ -411,6 +413,12 @@ class Agent(BaseAuditModel):
|
||||
update.action = "approve"
|
||||
update.save(update_fields=["action"])
|
||||
|
||||
DebugLog.info(
|
||||
agent=self,
|
||||
log_type="windows_updates",
|
||||
message=f"Approving windows updates on {self.hostname}",
|
||||
)
|
||||
|
||||
# returns agent policy merged with a client or site specific policy
|
||||
def get_patch_policy(self):
|
||||
|
||||
@@ -445,8 +453,8 @@ class Agent(BaseAuditModel):
|
||||
|
||||
# if patch policy still doesn't exist check default policy
|
||||
elif (
|
||||
core_settings.server_policy
|
||||
and core_settings.server_policy.winupdatepolicy.exists()
|
||||
core_settings.server_policy # type: ignore
|
||||
and core_settings.server_policy.winupdatepolicy.exists() # type: ignore
|
||||
):
|
||||
# make sure agent site and client are not blocking inheritance
|
||||
if (
|
||||
@@ -454,7 +462,7 @@ class Agent(BaseAuditModel):
|
||||
and not site.block_policy_inheritance
|
||||
and not site.client.block_policy_inheritance
|
||||
):
|
||||
patch_policy = core_settings.server_policy.winupdatepolicy.get()
|
||||
patch_policy = core_settings.server_policy.winupdatepolicy.get() # type: ignore
|
||||
|
||||
elif self.monitoring_type == "workstation":
|
||||
# check agent policy first which should override client or site policy
|
||||
@@ -483,8 +491,8 @@ class Agent(BaseAuditModel):
|
||||
|
||||
# if patch policy still doesn't exist check default policy
|
||||
elif (
|
||||
core_settings.workstation_policy
|
||||
and core_settings.workstation_policy.winupdatepolicy.exists()
|
||||
core_settings.workstation_policy # type: ignore
|
||||
and core_settings.workstation_policy.winupdatepolicy.exists() # type: ignore
|
||||
):
|
||||
# make sure agent site and client are not blocking inheritance
|
||||
if (
|
||||
@@ -493,7 +501,7 @@ class Agent(BaseAuditModel):
|
||||
and not site.client.block_policy_inheritance
|
||||
):
|
||||
patch_policy = (
|
||||
core_settings.workstation_policy.winupdatepolicy.get()
|
||||
core_settings.workstation_policy.winupdatepolicy.get() # type: ignore
|
||||
)
|
||||
|
||||
# if policy still doesn't exist return the agent patch policy
|
||||
@@ -608,35 +616,35 @@ class Agent(BaseAuditModel):
|
||||
|
||||
# check if alert template is applied globally and return
|
||||
if (
|
||||
core.alert_template
|
||||
and core.alert_template.is_active
|
||||
core.alert_template # type: ignore
|
||||
and core.alert_template.is_active # type: ignore
|
||||
and not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
and not client.block_policy_inheritance
|
||||
):
|
||||
templates.append(core.alert_template)
|
||||
templates.append(core.alert_template) # type: ignore
|
||||
|
||||
# if agent is a workstation, check if policy with alert template is assigned to the site, client, or core
|
||||
if (
|
||||
self.monitoring_type == "server"
|
||||
and core.server_policy
|
||||
and core.server_policy.alert_template
|
||||
and core.server_policy.alert_template.is_active
|
||||
and core.server_policy # type: ignore
|
||||
and core.server_policy.alert_template # type: ignore
|
||||
and core.server_policy.alert_template.is_active # type: ignore
|
||||
and not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
and not client.block_policy_inheritance
|
||||
):
|
||||
templates.append(core.server_policy.alert_template)
|
||||
templates.append(core.server_policy.alert_template) # type: ignore
|
||||
if (
|
||||
self.monitoring_type == "workstation"
|
||||
and core.workstation_policy
|
||||
and core.workstation_policy.alert_template
|
||||
and core.workstation_policy.alert_template.is_active
|
||||
and core.workstation_policy # type: ignore
|
||||
and core.workstation_policy.alert_template # type: ignore
|
||||
and core.workstation_policy.alert_template.is_active # type: ignore
|
||||
and not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
and not client.block_policy_inheritance
|
||||
):
|
||||
templates.append(core.workstation_policy.alert_template)
|
||||
templates.append(core.workstation_policy.alert_template) # type: ignore
|
||||
|
||||
# go through the templates and return the first one that isn't excluded
|
||||
for template in templates:
|
||||
@@ -739,7 +747,7 @@ class Agent(BaseAuditModel):
|
||||
try:
|
||||
ret = msgpack.loads(msg.data) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
DebugLog.error(agent=self, log_type="agent_issues", message=e)
|
||||
ret = str(e)
|
||||
|
||||
await nc.close()
|
||||
@@ -752,12 +760,9 @@ class Agent(BaseAuditModel):
|
||||
@staticmethod
|
||||
def serialize(agent):
|
||||
# serializes the agent and returns json
|
||||
from .serializers import AgentEditSerializer
|
||||
from .serializers import AgentAuditSerializer
|
||||
|
||||
ret = AgentEditSerializer(agent).data
|
||||
del ret["all_timezones"]
|
||||
del ret["client"]
|
||||
return ret
|
||||
return AgentAuditSerializer(agent).data
|
||||
|
||||
def delete_superseded_updates(self):
|
||||
try:
|
||||
@@ -772,7 +777,7 @@ class Agent(BaseAuditModel):
|
||||
# skip if no version info is available therefore nothing to parse
|
||||
try:
|
||||
vers = [
|
||||
re.search(r"\(Version(.*?)\)", i).group(1).strip()
|
||||
re.search(r"\(Version(.*?)\)", i).group(1).strip() # type: ignore
|
||||
for i in titles
|
||||
]
|
||||
sorted_vers = sorted(vers, key=LooseVersion)
|
||||
@@ -807,7 +812,7 @@ class Agent(BaseAuditModel):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
CORE.send_mail(
|
||||
CORE.send_mail( # type: ignore
|
||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
|
||||
(
|
||||
f"Data has not been received from client {self.client.name}, "
|
||||
@@ -822,7 +827,7 @@ class Agent(BaseAuditModel):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
CORE.send_mail(
|
||||
CORE.send_mail( # type: ignore
|
||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
|
||||
(
|
||||
f"Data has been received from client {self.client.name}, "
|
||||
@@ -837,7 +842,7 @@ class Agent(BaseAuditModel):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
CORE.send_sms(
|
||||
CORE.send_sms( # type: ignore
|
||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
|
||||
alert_template=self.alert_template,
|
||||
)
|
||||
@@ -846,7 +851,7 @@ class Agent(BaseAuditModel):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
CORE.send_sms(
|
||||
CORE.send_sms( # type: ignore
|
||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
|
||||
alert_template=self.alert_template,
|
||||
)
|
||||
@@ -928,3 +933,57 @@ class AgentCustomField(models.Model):
|
||||
return self.bool_value
|
||||
else:
|
||||
return self.string_value
|
||||
|
||||
def save_to_field(self, value):
|
||||
if self.field.type in [
|
||||
"text",
|
||||
"number",
|
||||
"single",
|
||||
"datetime",
|
||||
]:
|
||||
self.string_value = value
|
||||
self.save()
|
||||
elif self.field.type == "multiple":
|
||||
self.multiple_value = value.split(",")
|
||||
self.save()
|
||||
elif self.field.type == "checkbox":
|
||||
self.bool_value = bool(value)
|
||||
self.save()
|
||||
|
||||
|
||||
AGENT_HISTORY_TYPES = (
|
||||
("task_run", "Task Run"),
|
||||
("script_run", "Script Run"),
|
||||
("cmd_run", "CMD Run"),
|
||||
)
|
||||
|
||||
AGENT_HISTORY_STATUS = (("success", "Success"), ("failure", "Failure"))
|
||||
|
||||
|
||||
class AgentHistory(models.Model):
|
||||
agent = models.ForeignKey(
|
||||
Agent,
|
||||
related_name="history",
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
time = models.DateTimeField(auto_now_add=True)
|
||||
type = models.CharField(
|
||||
max_length=50, choices=AGENT_HISTORY_TYPES, default="cmd_run"
|
||||
)
|
||||
command = models.TextField(null=True, blank=True)
|
||||
status = models.CharField(
|
||||
max_length=50, choices=AGENT_HISTORY_STATUS, default="success"
|
||||
)
|
||||
username = models.CharField(max_length=50, default="system")
|
||||
results = models.TextField(null=True, blank=True)
|
||||
script = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="history",
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
script_results = models.JSONField(null=True, blank=True)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.agent.hostname} - {self.type}"
|
||||
|
||||
@@ -1,15 +1,14 @@
|
||||
import pytz
|
||||
from rest_framework import serializers
|
||||
|
||||
from clients.serializers import ClientSerializer
|
||||
from rest_framework import serializers
|
||||
from tacticalrmm.utils import get_default_timezone
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .models import Agent, AgentCustomField, Note
|
||||
from .models import Agent, AgentCustomField, Note, AgentHistory
|
||||
|
||||
|
||||
class AgentSerializer(serializers.ModelSerializer):
|
||||
# for vue
|
||||
patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
|
||||
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
|
||||
status = serializers.ReadOnlyField()
|
||||
cpu_model = serializers.ReadOnlyField()
|
||||
@@ -45,8 +44,6 @@ class AgentOverdueActionSerializer(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class AgentTableSerializer(serializers.ModelSerializer):
|
||||
patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
|
||||
pending_actions = serializers.SerializerMethodField()
|
||||
status = serializers.ReadOnlyField()
|
||||
checks = serializers.ReadOnlyField()
|
||||
last_seen = serializers.SerializerMethodField()
|
||||
@@ -69,9 +66,6 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
||||
"always_alert": obj.alert_template.agent_always_alert,
|
||||
}
|
||||
|
||||
def get_pending_actions(self, obj):
|
||||
return obj.pendingactions.filter(status="pending").count()
|
||||
|
||||
def get_last_seen(self, obj) -> str:
|
||||
if obj.time_zone is not None:
|
||||
agent_tz = pytz.timezone(obj.time_zone)
|
||||
@@ -103,8 +97,8 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
||||
"monitoring_type",
|
||||
"description",
|
||||
"needs_reboot",
|
||||
"patches_pending",
|
||||
"pending_actions",
|
||||
"has_patches_pending",
|
||||
"pending_actions_count",
|
||||
"status",
|
||||
"overdue_text_alert",
|
||||
"overdue_email_alert",
|
||||
@@ -165,6 +159,7 @@ class AgentEditSerializer(serializers.ModelSerializer):
|
||||
"offline_time",
|
||||
"overdue_text_alert",
|
||||
"overdue_email_alert",
|
||||
"overdue_dashboard_alert",
|
||||
"all_timezones",
|
||||
"winupdatepolicy",
|
||||
"policy",
|
||||
@@ -173,11 +168,6 @@ class AgentEditSerializer(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class WinAgentSerializer(serializers.ModelSerializer):
|
||||
# for the windows agent
|
||||
patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
|
||||
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
|
||||
status = serializers.ReadOnlyField()
|
||||
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = "__all__"
|
||||
@@ -211,3 +201,22 @@ class NotesSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = ["hostname", "pk", "notes"]
|
||||
|
||||
|
||||
class AgentHistorySerializer(serializers.ModelSerializer):
|
||||
time = serializers.SerializerMethodField(read_only=True)
|
||||
script_name = serializers.ReadOnlyField(source="script.name")
|
||||
|
||||
class Meta:
|
||||
model = AgentHistory
|
||||
fields = "__all__"
|
||||
|
||||
def get_time(self, history):
|
||||
timezone = get_default_timezone()
|
||||
return history.time.astimezone(timezone).strftime("%m %d %Y %H:%M:%S")
|
||||
|
||||
|
||||
class AgentAuditSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Agent
|
||||
exclude = ["disks", "services", "wmi_detail"]
|
||||
|
||||
@@ -5,19 +5,17 @@ import urllib.parse
|
||||
from time import sleep
|
||||
from typing import Union
|
||||
|
||||
from alerts.models import Alert
|
||||
from core.models import CodeSignToken, CoreSettings
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from logs.models import DebugLog, PendingAction
|
||||
from packaging import version as pyver
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CodeSignToken, CoreSettings
|
||||
from logs.models import PendingAction
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.celery import app
|
||||
from tacticalrmm.utils import run_nats_api_cmd
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from agents.models import Agent
|
||||
|
||||
|
||||
def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str:
|
||||
@@ -30,8 +28,10 @@ def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str
|
||||
|
||||
# skip if we can't determine the arch
|
||||
if agent.arch is None:
|
||||
logger.warning(
|
||||
f"Unable to determine arch on {agent.hostname}. Skipping agent update."
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to determine arch on {agent.hostname}({agent.pk}). Skipping agent update.",
|
||||
)
|
||||
return "noarch"
|
||||
|
||||
@@ -78,7 +78,7 @@ def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str
|
||||
@app.task
|
||||
def force_code_sign(pks: list[int]) -> None:
|
||||
try:
|
||||
token = CodeSignToken.objects.first().token
|
||||
token = CodeSignToken.objects.first().tokenv # type:ignore
|
||||
except:
|
||||
return
|
||||
|
||||
@@ -93,7 +93,7 @@ def force_code_sign(pks: list[int]) -> None:
|
||||
@app.task
|
||||
def send_agent_update_task(pks: list[int]) -> None:
|
||||
try:
|
||||
codesigntoken = CodeSignToken.objects.first().token
|
||||
codesigntoken = CodeSignToken.objects.first().token # type:ignore
|
||||
except:
|
||||
codesigntoken = None
|
||||
|
||||
@@ -108,11 +108,11 @@ def send_agent_update_task(pks: list[int]) -> None:
|
||||
@app.task
|
||||
def auto_self_agent_update_task() -> None:
|
||||
core = CoreSettings.objects.first()
|
||||
if not core.agent_auto_update:
|
||||
if not core.agent_auto_update: # type:ignore
|
||||
return
|
||||
|
||||
try:
|
||||
codesigntoken = CodeSignToken.objects.first().token
|
||||
codesigntoken = CodeSignToken.objects.first().token # type:ignore
|
||||
except:
|
||||
codesigntoken = None
|
||||
|
||||
@@ -232,14 +232,24 @@ def run_script_email_results_task(
|
||||
nats_timeout: int,
|
||||
emails: list[str],
|
||||
args: list[str] = [],
|
||||
history_pk: int = 0,
|
||||
):
|
||||
agent = Agent.objects.get(pk=agentpk)
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
r = agent.run_script(
|
||||
scriptpk=script.pk, args=args, full=True, timeout=nats_timeout, wait=True
|
||||
scriptpk=script.pk,
|
||||
args=args,
|
||||
full=True,
|
||||
timeout=nats_timeout,
|
||||
wait=True,
|
||||
history_pk=history_pk,
|
||||
)
|
||||
if r == "timeout":
|
||||
logger.error(f"{agent.hostname} timed out running script.")
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="scripting",
|
||||
message=f"{agent.hostname}({agent.pk}) timed out running script.",
|
||||
)
|
||||
return
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
@@ -255,28 +265,32 @@ def run_script_email_results_task(
|
||||
|
||||
msg = EmailMessage()
|
||||
msg["Subject"] = subject
|
||||
msg["From"] = CORE.smtp_from_email
|
||||
msg["From"] = CORE.smtp_from_email # type:ignore
|
||||
|
||||
if emails:
|
||||
msg["To"] = ", ".join(emails)
|
||||
else:
|
||||
msg["To"] = ", ".join(CORE.email_alert_recipients)
|
||||
msg["To"] = ", ".join(CORE.email_alert_recipients) # type:ignore
|
||||
|
||||
msg.set_content(body)
|
||||
|
||||
try:
|
||||
with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
|
||||
if CORE.smtp_requires_auth:
|
||||
with smtplib.SMTP(
|
||||
CORE.smtp_host, CORE.smtp_port, timeout=20 # type:ignore
|
||||
) as server: # type:ignore
|
||||
if CORE.smtp_requires_auth: # type:ignore
|
||||
server.ehlo()
|
||||
server.starttls()
|
||||
server.login(CORE.smtp_host_user, CORE.smtp_host_password)
|
||||
server.login(
|
||||
CORE.smtp_host_user, CORE.smtp_host_password # type:ignore
|
||||
) # type:ignore
|
||||
server.send_message(msg)
|
||||
server.quit()
|
||||
else:
|
||||
server.send_message(msg)
|
||||
server.quit()
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
DebugLog.error(message=e)
|
||||
|
||||
|
||||
@app.task
|
||||
@@ -307,19 +321,73 @@ def clear_faults_task(older_than_days: int) -> None:
|
||||
)
|
||||
|
||||
|
||||
@app.task
|
||||
def monitor_agents_task() -> None:
|
||||
agents = Agent.objects.only(
|
||||
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
|
||||
)
|
||||
ids = [i.agent_id for i in agents if i.status != "online"]
|
||||
run_nats_api_cmd("monitor", ids)
|
||||
|
||||
|
||||
@app.task
|
||||
def get_wmi_task() -> None:
|
||||
agents = Agent.objects.only(
|
||||
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
|
||||
)
|
||||
ids = [i.agent_id for i in agents if i.status == "online"]
|
||||
run_nats_api_cmd("wmi", ids)
|
||||
run_nats_api_cmd("wmi", ids, timeout=45)
|
||||
|
||||
|
||||
@app.task
|
||||
def agent_checkin_task() -> None:
|
||||
run_nats_api_cmd("checkin", timeout=30)
|
||||
|
||||
|
||||
@app.task
|
||||
def agent_getinfo_task() -> None:
|
||||
run_nats_api_cmd("agentinfo", timeout=30)
|
||||
|
||||
|
||||
@app.task
|
||||
def prune_agent_history(older_than_days: int) -> str:
|
||||
from .models import AgentHistory
|
||||
|
||||
AgentHistory.objects.filter(
|
||||
time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
|
||||
).delete()
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def handle_agents_task() -> None:
|
||||
q = Agent.objects.prefetch_related("pendingactions", "autotasks").only(
|
||||
"pk", "agent_id", "version", "last_seen", "overdue_time", "offline_time"
|
||||
)
|
||||
agents = [
|
||||
i
|
||||
for i in q
|
||||
if pyver.parse(i.version) >= pyver.parse("1.6.0") and i.status == "online"
|
||||
]
|
||||
for agent in agents:
|
||||
# change agent update pending status to completed if agent has just updated
|
||||
if (
|
||||
pyver.parse(agent.version) == pyver.parse(settings.LATEST_AGENT_VER)
|
||||
and agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists()
|
||||
):
|
||||
agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).update(status="completed")
|
||||
|
||||
# sync scheduled tasks
|
||||
if agent.autotasks.exclude(sync_status="synced").exists(): # type: ignore
|
||||
tasks = agent.autotasks.exclude(sync_status="synced") # type: ignore
|
||||
|
||||
for task in tasks:
|
||||
if task.sync_status == "pendingdeletion":
|
||||
task.delete_task_on_agent()
|
||||
elif task.sync_status == "initial":
|
||||
task.modify_task_on_agent()
|
||||
elif task.sync_status == "notsynced":
|
||||
task.create_task_on_agent()
|
||||
|
||||
# handles any alerting actions
|
||||
if Alert.objects.filter(agent=agent, resolved=False).exists():
|
||||
try:
|
||||
Alert.handle_alert_resolve(agent)
|
||||
except:
|
||||
continue
|
||||
|
||||
@@ -1,19 +1,18 @@
|
||||
import json
|
||||
import os
|
||||
from itertools import cycle
|
||||
from django.utils import timezone as djangotime
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.conf import settings
|
||||
from logs.models import PendingAction
|
||||
from model_bakery import baker
|
||||
from packaging import version as pyver
|
||||
|
||||
from logs.models import PendingAction
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .models import Agent, AgentCustomField
|
||||
from .serializers import AgentSerializer
|
||||
from .models import Agent, AgentCustomField, AgentHistory
|
||||
from .serializers import AgentHistorySerializer, AgentSerializer
|
||||
from .tasks import auto_self_agent_update_task
|
||||
|
||||
|
||||
@@ -306,7 +305,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
"shell": "cmd",
|
||||
"timeout": 30,
|
||||
}
|
||||
mock_ret.return_value = "nt authority\system"
|
||||
mock_ret.return_value = "nt authority\\system"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertIsInstance(r.data, str) # type: ignore
|
||||
@@ -437,7 +436,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(RecoveryAction.objects.count(), 1)
|
||||
mesh_recovery = RecoveryAction.objects.first()
|
||||
self.assertEqual(mesh_recovery.mode, "mesh")
|
||||
self.assertEqual(mesh_recovery.mode, "mesh") # type: ignore
|
||||
nats_cmd.reset_mock()
|
||||
RecoveryAction.objects.all().delete()
|
||||
|
||||
@@ -472,8 +471,8 @@ class TestAgentViews(TacticalTestCase):
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(RecoveryAction.objects.count(), 1)
|
||||
cmd_recovery = RecoveryAction.objects.first()
|
||||
self.assertEqual(cmd_recovery.mode, "command")
|
||||
self.assertEqual(cmd_recovery.command, "shutdown /r /t 10 /f")
|
||||
self.assertEqual(cmd_recovery.mode, "command") # type: ignore
|
||||
self.assertEqual(cmd_recovery.command, "shutdown /r /t 10 /f") # type: ignore
|
||||
|
||||
def test_agents_agent_detail(self):
|
||||
url = f"/agents/{self.agent.pk}/agentdetail/"
|
||||
@@ -770,6 +769,9 @@ class TestAgentViews(TacticalTestCase):
|
||||
@patch("agents.tasks.run_script_email_results_task.delay")
|
||||
@patch("agents.models.Agent.run_script")
|
||||
def test_run_script(self, run_script, email_task):
|
||||
from .models import AgentCustomField, Note
|
||||
from clients.models import ClientCustomField, SiteCustomField
|
||||
|
||||
run_script.return_value = "ok"
|
||||
url = "/agents/runscript/"
|
||||
script = baker.make_recipe("scripts.script")
|
||||
@@ -777,7 +779,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
# test wait
|
||||
data = {
|
||||
"pk": self.agent.pk,
|
||||
"scriptPK": script.pk,
|
||||
"script": script.pk,
|
||||
"output": "wait",
|
||||
"args": [],
|
||||
"timeout": 15,
|
||||
@@ -786,18 +788,18 @@ class TestAgentViews(TacticalTestCase):
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
run_script.assert_called_with(
|
||||
scriptpk=script.pk, args=[], timeout=18, wait=True
|
||||
scriptpk=script.pk, args=[], timeout=18, wait=True, history_pk=0
|
||||
)
|
||||
run_script.reset_mock()
|
||||
|
||||
# test email default
|
||||
data = {
|
||||
"pk": self.agent.pk,
|
||||
"scriptPK": script.pk,
|
||||
"script": script.pk,
|
||||
"output": "email",
|
||||
"args": ["abc", "123"],
|
||||
"timeout": 15,
|
||||
"emailmode": "default",
|
||||
"emailMode": "default",
|
||||
"emails": ["admin@example.com", "bob@example.com"],
|
||||
}
|
||||
r = self.client.post(url, data, format="json")
|
||||
@@ -812,7 +814,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
email_task.reset_mock()
|
||||
|
||||
# test email overrides
|
||||
data["emailmode"] = "custom"
|
||||
data["emailMode"] = "custom"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
email_task.assert_called_with(
|
||||
@@ -826,7 +828,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
# test fire and forget
|
||||
data = {
|
||||
"pk": self.agent.pk,
|
||||
"scriptPK": script.pk,
|
||||
"script": script.pk,
|
||||
"output": "forget",
|
||||
"args": ["hello", "world"],
|
||||
"timeout": 22,
|
||||
@@ -835,8 +837,138 @@ class TestAgentViews(TacticalTestCase):
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
run_script.assert_called_with(
|
||||
scriptpk=script.pk, args=["hello", "world"], timeout=25
|
||||
scriptpk=script.pk, args=["hello", "world"], timeout=25, history_pk=0
|
||||
)
|
||||
run_script.reset_mock()
|
||||
|
||||
# test collector
|
||||
|
||||
# save to agent custom field
|
||||
custom_field = baker.make("core.CustomField", model="agent")
|
||||
data = {
|
||||
"pk": self.agent.pk,
|
||||
"script": script.pk,
|
||||
"output": "collector",
|
||||
"args": ["hello", "world"],
|
||||
"timeout": 22,
|
||||
"custom_field": custom_field.id, # type: ignore
|
||||
"save_all_output": True,
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
run_script.assert_called_with(
|
||||
scriptpk=script.pk,
|
||||
args=["hello", "world"],
|
||||
timeout=25,
|
||||
wait=True,
|
||||
history_pk=0,
|
||||
)
|
||||
run_script.reset_mock()
|
||||
|
||||
self.assertEqual(
|
||||
AgentCustomField.objects.get(agent=self.agent.pk, field=custom_field).value,
|
||||
"ok",
|
||||
)
|
||||
|
||||
# save to site custom field
|
||||
custom_field = baker.make("core.CustomField", model="site")
|
||||
data = {
|
||||
"pk": self.agent.pk,
|
||||
"script": script.pk,
|
||||
"output": "collector",
|
||||
"args": ["hello", "world"],
|
||||
"timeout": 22,
|
||||
"custom_field": custom_field.id, # type: ignore
|
||||
"save_all_output": False,
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
run_script.assert_called_with(
|
||||
scriptpk=script.pk,
|
||||
args=["hello", "world"],
|
||||
timeout=25,
|
||||
wait=True,
|
||||
history_pk=0,
|
||||
)
|
||||
run_script.reset_mock()
|
||||
|
||||
self.assertEqual(
|
||||
SiteCustomField.objects.get(
|
||||
site=self.agent.site.pk, field=custom_field
|
||||
).value,
|
||||
"ok",
|
||||
)
|
||||
|
||||
# save to client custom field
|
||||
custom_field = baker.make("core.CustomField", model="client")
|
||||
data = {
|
||||
"pk": self.agent.pk,
|
||||
"script": script.pk,
|
||||
"output": "collector",
|
||||
"args": ["hello", "world"],
|
||||
"timeout": 22,
|
||||
"custom_field": custom_field.id, # type: ignore
|
||||
"save_all_output": False,
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
run_script.assert_called_with(
|
||||
scriptpk=script.pk,
|
||||
args=["hello", "world"],
|
||||
timeout=25,
|
||||
wait=True,
|
||||
history_pk=0,
|
||||
)
|
||||
run_script.reset_mock()
|
||||
|
||||
self.assertEqual(
|
||||
ClientCustomField.objects.get(
|
||||
client=self.agent.client.pk, field=custom_field
|
||||
).value,
|
||||
"ok",
|
||||
)
|
||||
|
||||
# test save to note
|
||||
data = {
|
||||
"pk": self.agent.pk,
|
||||
"script": script.pk,
|
||||
"output": "note",
|
||||
"args": ["hello", "world"],
|
||||
"timeout": 22,
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
run_script.assert_called_with(
|
||||
scriptpk=script.pk,
|
||||
args=["hello", "world"],
|
||||
timeout=25,
|
||||
wait=True,
|
||||
history_pk=0,
|
||||
)
|
||||
run_script.reset_mock()
|
||||
|
||||
self.assertEqual(Note.objects.get(agent=self.agent).note, "ok")
|
||||
|
||||
def test_get_agent_history(self):
|
||||
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
history = baker.make("agents.AgentHistory", agent=agent, _quantity=30)
|
||||
url = f"/agents/history/{agent.id}/"
|
||||
|
||||
# test agent not found
|
||||
r = self.client.get("/agents/history/500/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
# test pulling data
|
||||
r = self.client.get(url, format="json")
|
||||
data = AgentHistorySerializer(history, many=True).data
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, data) # type:ignore
|
||||
|
||||
|
||||
class TestAgentViewsNew(TacticalTestCase):
|
||||
@@ -1048,3 +1180,25 @@ class TestAgentTasks(TacticalTestCase):
|
||||
|
||||
r = auto_self_agent_update_task.s().apply()
|
||||
self.assertEqual(agent_update.call_count, 33)
|
||||
|
||||
def test_agent_history_prune_task(self):
|
||||
from .tasks import prune_agent_history
|
||||
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
history = baker.make(
|
||||
"agents.AgentHistory",
|
||||
agent=agent,
|
||||
_quantity=50,
|
||||
)
|
||||
|
||||
days = 0
|
||||
for item in history: # type: ignore
|
||||
item.time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
item.save()
|
||||
days = days + 5
|
||||
|
||||
# delete AgentHistory older than 30 days
|
||||
prune_agent_history(30)
|
||||
|
||||
self.assertEqual(AgentHistory.objects.filter(agent=agent).count(), 6)
|
||||
|
||||
@@ -29,4 +29,5 @@ urlpatterns = [
|
||||
path("bulk/", views.bulk),
|
||||
path("maintenance/", views.agent_maintenance),
|
||||
path("<int:pk>/wmi/", views.WMI.as_view()),
|
||||
path("history/<int:pk>/", views.AgentHistoryView.as_view()),
|
||||
]
|
||||
|
||||
@@ -8,7 +8,6 @@ import time
|
||||
from django.conf import settings
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
@@ -17,14 +16,14 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from core.models import CoreSettings
|
||||
from logs.models import AuditLog, PendingAction
|
||||
from logs.models import AuditLog, DebugLog, PendingAction
|
||||
from scripts.models import Script
|
||||
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
|
||||
from tacticalrmm.utils import get_default_timezone, notify_error, reload_nats
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
|
||||
|
||||
from .models import Agent, AgentCustomField, Note, RecoveryAction
|
||||
from .models import Agent, AgentCustomField, Note, RecoveryAction, AgentHistory
|
||||
from .permissions import (
|
||||
EditAgentPerms,
|
||||
EvtLogPerms,
|
||||
@@ -42,6 +41,7 @@ from .permissions import (
|
||||
from .serializers import (
|
||||
AgentCustomFieldSerializer,
|
||||
AgentEditSerializer,
|
||||
AgentHistorySerializer,
|
||||
AgentHostnameSerializer,
|
||||
AgentOverdueActionSerializer,
|
||||
AgentSerializer,
|
||||
@@ -51,8 +51,6 @@ from .serializers import (
|
||||
)
|
||||
from .tasks import run_script_email_results_task, send_agent_update_task
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@api_view()
|
||||
def get_agent_versions(request):
|
||||
@@ -115,7 +113,7 @@ def uninstall(request):
|
||||
def edit_agent(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["id"])
|
||||
|
||||
a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
a_serializer = AgentEditSerializer(instance=agent, data=request.data, partial=True)
|
||||
a_serializer.is_valid(raise_exception=True)
|
||||
a_serializer.save()
|
||||
|
||||
@@ -160,17 +158,21 @@ def meshcentral(request, pk):
|
||||
core = CoreSettings.objects.first()
|
||||
|
||||
token = agent.get_login_token(
|
||||
key=core.mesh_token, user=f"user//{core.mesh_username}"
|
||||
key=core.mesh_token, user=f"user//{core.mesh_username}" # type:ignore
|
||||
)
|
||||
|
||||
if token == "err":
|
||||
return notify_error("Invalid mesh token")
|
||||
|
||||
control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
|
||||
terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
|
||||
file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31"
|
||||
control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31" # type:ignore
|
||||
terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31" # type:ignore
|
||||
file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31" # type:ignore
|
||||
|
||||
AuditLog.audit_mesh_session(username=request.user.username, hostname=agent.hostname)
|
||||
AuditLog.audit_mesh_session(
|
||||
username=request.user.username,
|
||||
agent=agent,
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
ret = {
|
||||
"hostname": agent.hostname,
|
||||
@@ -248,6 +250,16 @@ def send_raw_cmd(request):
            "shell": request.data["shell"],
        },
    }

    if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
        hist = AgentHistory.objects.create(
            agent=agent,
            type="cmd_run",
            command=request.data["cmd"],
            username=request.user.username[:50],
        )
        data["id"] = hist.pk

    r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))

    if r == "timeout":
@@ -255,9 +267,10 @@ def send_raw_cmd(request):

    AuditLog.audit_raw_command(
        username=request.user.username,
        hostname=agent.hostname,
        agent=agent,
        cmd=request.data["cmd"],
        shell=request.data["shell"],
        debug_info={"ip": request._client_ip},
    )

    return Response(r)
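The hunk above gates history creation on the agent version with packaging.version, which compares release segments numerically rather than as plain strings; a quick standalone illustration:

from packaging import version as pyver

assert pyver.parse("1.6.0") >= pyver.parse("1.6.0")
assert pyver.parse("1.10.2") > pyver.parse("1.9.9")       # numeric, not lexicographic
assert not pyver.parse("1.5.9") >= pyver.parse("1.6.0")   # older agents skip the new code path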
@@ -302,6 +315,8 @@ class AgentsTableList(APIView):
|
||||
"last_logged_in_user",
|
||||
"time_zone",
|
||||
"maintenance_mode",
|
||||
"pending_actions_count",
|
||||
"has_patches_pending",
|
||||
)
|
||||
ctx = {"default_tz": get_default_timezone()}
|
||||
serializer = AgentTableSerializer(queryset, many=True, context=ctx)
|
||||
@@ -388,6 +403,7 @@ class Reboot(APIView):
|
||||
@permission_classes([IsAuthenticated, InstallAgentPerms])
|
||||
def install_agent(request):
|
||||
from knox.models import AuthToken
|
||||
from accounts.models import User
|
||||
|
||||
from agents.utils import get_winagent_url
|
||||
|
||||
@@ -413,8 +429,10 @@ def install_agent(request):
|
||||
)
|
||||
download_url = get_winagent_url(arch)
|
||||
|
||||
installer_user = User.objects.filter(is_installer_user=True).first()
|
||||
|
||||
_, token = AuthToken.objects.create(
|
||||
user=request.user, expiry=dt.timedelta(hours=request.data["expires"])
|
||||
user=installer_user, expiry=dt.timedelta(hours=request.data["expires"])
|
||||
)
|
||||
|
||||
if request.data["installMethod"] == "exe":
|
||||
@@ -503,7 +521,7 @@ def install_agent(request):
|
||||
try:
|
||||
os.remove(ps1)
|
||||
except Exception as e:
|
||||
logger.error(str(e))
|
||||
DebugLog.error(message=str(e))
|
||||
|
||||
with open(ps1, "w") as f:
|
||||
f.write(text)
|
||||
@@ -561,26 +579,41 @@ def recover(request):
|
||||
@permission_classes([IsAuthenticated, RunScriptPerms])
|
||||
def run_script(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
script = get_object_or_404(Script, pk=request.data["scriptPK"])
|
||||
script = get_object_or_404(Script, pk=request.data["script"])
|
||||
output = request.data["output"]
|
||||
args = request.data["args"]
|
||||
req_timeout = int(request.data["timeout"]) + 3
|
||||
|
||||
AuditLog.audit_script_run(
|
||||
username=request.user.username,
|
||||
hostname=agent.hostname,
|
||||
agent=agent,
|
||||
script=script.name,
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
history_pk = 0
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type="script_run",
|
||||
script=script,
|
||||
username=request.user.username[:50],
|
||||
)
|
||||
history_pk = hist.pk
|
||||
|
||||
if output == "wait":
|
||||
r = agent.run_script(
|
||||
scriptpk=script.pk, args=args, timeout=req_timeout, wait=True
|
||||
scriptpk=script.pk,
|
||||
args=args,
|
||||
timeout=req_timeout,
|
||||
wait=True,
|
||||
history_pk=history_pk,
|
||||
)
|
||||
return Response(r)
|
||||
|
||||
elif output == "email":
|
||||
emails = (
|
||||
[] if request.data["emailmode"] == "default" else request.data["emails"]
|
||||
[] if request.data["emailMode"] == "default" else request.data["emails"]
|
||||
)
|
||||
run_script_email_results_task.delay(
|
||||
agentpk=agent.pk,
|
||||
@@ -589,8 +622,51 @@ def run_script(request):
|
||||
emails=emails,
|
||||
args=args,
|
||||
)
|
||||
elif output == "collector":
|
||||
from core.models import CustomField
|
||||
|
||||
r = agent.run_script(
|
||||
scriptpk=script.pk,
|
||||
args=args,
|
||||
timeout=req_timeout,
|
||||
wait=True,
|
||||
history_pk=history_pk,
|
||||
)
|
||||
|
||||
custom_field = CustomField.objects.get(pk=request.data["custom_field"])
|
||||
|
||||
if custom_field.model == "agent":
|
||||
field = custom_field.get_or_create_field_value(agent)
|
||||
elif custom_field.model == "client":
|
||||
field = custom_field.get_or_create_field_value(agent.client)
|
||||
elif custom_field.model == "site":
|
||||
field = custom_field.get_or_create_field_value(agent.site)
|
||||
else:
|
||||
return notify_error("Custom Field was invalid")
|
||||
|
||||
value = (
|
||||
r.strip()
|
||||
if request.data["save_all_output"]
|
||||
else r.strip().split("\n")[-1].strip()
|
||||
)
|
||||
|
||||
field.save_to_field(value)
|
||||
return Response(r)
|
||||
elif output == "note":
|
||||
r = agent.run_script(
|
||||
scriptpk=script.pk,
|
||||
args=args,
|
||||
timeout=req_timeout,
|
||||
wait=True,
|
||||
history_pk=history_pk,
|
||||
)
|
||||
|
||||
Note.objects.create(agent=agent, user=request.user, note=r)
|
||||
return Response(r)
|
||||
    else:
        agent.run_script(scriptpk=script.pk, args=args, timeout=req_timeout)
        agent.run_script(
            scriptpk=script.pk, args=args, timeout=req_timeout, history_pk=history_pk
        )

    return Response(f"{script.name} will now be run on {agent.hostname}")
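In the collector branch above, either the whole script output or only its last line is written to the custom field, depending on save_all_output; a small standalone illustration of that split (the sample output string is made up):

raw = "checking disks...\nchecking services...\nC:\\ 42% free"

save_all_output = False
value = raw.strip() if save_all_output else raw.strip().split("\n")[-1].strip()
print(value)  # prints only the last line: C:\ 42% free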
@@ -663,7 +739,7 @@ class GetEditDeleteNote(APIView):
|
||||
@api_view(["POST"])
|
||||
@permission_classes([IsAuthenticated, RunBulkPerms])
|
||||
def bulk(request):
|
||||
if request.data["target"] == "agents" and not request.data["agentPKs"]:
|
||||
if request.data["target"] == "agents" and not request.data["agents"]:
|
||||
return notify_error("Must select at least 1 agent")
|
||||
|
||||
if request.data["target"] == "client":
|
||||
@@ -671,7 +747,7 @@ def bulk(request):
|
||||
elif request.data["target"] == "site":
|
||||
q = Agent.objects.filter(site_id=request.data["site"])
|
||||
elif request.data["target"] == "agents":
|
||||
q = Agent.objects.filter(pk__in=request.data["agentPKs"])
|
||||
q = Agent.objects.filter(pk__in=request.data["agents"])
|
||||
elif request.data["target"] == "all":
|
||||
q = Agent.objects.only("pk", "monitoring_type")
|
||||
else:
|
||||
@@ -684,29 +760,48 @@ def bulk(request):
|
||||
|
||||
agents: list[int] = [agent.pk for agent in q]
|
||||
|
||||
AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
|
||||
if not agents:
|
||||
return notify_error("No agents where found meeting the selected criteria")
|
||||
|
||||
AuditLog.audit_bulk_action(
|
||||
request.user,
|
||||
request.data["mode"],
|
||||
request.data,
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
if request.data["mode"] == "command":
|
||||
handle_bulk_command_task.delay(
|
||||
agents, request.data["cmd"], request.data["shell"], request.data["timeout"]
|
||||
agents,
|
||||
request.data["cmd"],
|
||||
request.data["shell"],
|
||||
request.data["timeout"],
|
||||
request.user.username[:50],
|
||||
run_on_offline=request.data["offlineAgents"],
|
||||
)
|
||||
return Response(f"Command will now be run on {len(agents)} agents")
|
||||
|
||||
elif request.data["mode"] == "script":
|
||||
script = get_object_or_404(Script, pk=request.data["scriptPK"])
|
||||
script = get_object_or_404(Script, pk=request.data["script"])
|
||||
handle_bulk_script_task.delay(
|
||||
script.pk, agents, request.data["args"], request.data["timeout"]
|
||||
script.pk,
|
||||
agents,
|
||||
request.data["args"],
|
||||
request.data["timeout"],
|
||||
request.user.username[:50],
|
||||
)
|
||||
return Response(f"{script.name} will now be run on {len(agents)} agents")
|
||||
|
||||
elif request.data["mode"] == "install":
|
||||
bulk_install_updates_task.delay(agents)
|
||||
return Response(
|
||||
f"Pending updates will now be installed on {len(agents)} agents"
|
||||
)
|
||||
elif request.data["mode"] == "scan":
|
||||
bulk_check_for_updates_task.delay(agents)
|
||||
return Response(f"Patch status scan will now run on {len(agents)} agents")
|
||||
elif request.data["mode"] == "patch":
|
||||
|
||||
if request.data["patchMode"] == "install":
|
||||
bulk_install_updates_task.delay(agents)
|
||||
return Response(
|
||||
f"Pending updates will now be installed on {len(agents)} agents"
|
||||
)
|
||||
elif request.data["patchMode"] == "scan":
|
||||
bulk_check_for_updates_task.delay(agents)
|
||||
return Response(f"Patch status scan will now run on {len(agents)} agents")
|
||||
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
@@ -741,3 +836,11 @@ class WMI(APIView):
        if r != "ok":
            return notify_error("Unable to contact the agent")
        return Response("ok")


class AgentHistoryView(APIView):
    def get(self, request, pk):
        agent = get_object_or_404(Agent, pk=pk)
        history = AgentHistory.objects.filter(agent=agent)

        return Response(AgentHistorySerializer(history, many=True).data)
33 api/tacticalrmm/alerts/migrations/0007_auto_20210721_0423.py Normal file
@@ -0,0 +1,33 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 04:23
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0006_auto_20210217_1736'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='created_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='created_time',
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='modified_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='modified_time',
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
]
|
||||
28 api/tacticalrmm/alerts/migrations/0008_auto_20210721_1757.py Normal file
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 17:57
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0007_auto_20210721_0423'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_script_actions',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='check_script_actions',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='task_script_actions',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
]
|
||||
28 api/tacticalrmm/alerts/migrations/0009_auto_20210721_1810.py Normal file
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 18:10
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0008_auto_20210721_1757'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_script_actions',
|
||||
field=models.BooleanField(blank=True, default=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='check_script_actions',
|
||||
field=models.BooleanField(blank=True, default=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='task_script_actions',
|
||||
field=models.BooleanField(blank=True, default=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -3,19 +3,18 @@ from __future__ import annotations
|
||||
import re
|
||||
from typing import TYPE_CHECKING, Union
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.db.models.fields import BooleanField, PositiveIntegerField
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from agents.models import Agent
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
SEVERITY_CHOICES = [
|
||||
("info", "Informational"),
|
||||
@@ -173,6 +172,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.agent_always_email
|
||||
always_text = alert_template.agent_always_text
|
||||
alert_interval = alert_template.agent_periodic_alert_days
|
||||
run_script_action = alert_template.agent_script_actions
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_availability_alert(instance)
|
||||
@@ -209,6 +209,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.check_always_email
|
||||
always_text = alert_template.check_always_text
|
||||
alert_interval = alert_template.check_periodic_alert_days
|
||||
run_script_action = alert_template.check_script_actions
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_check_alert(instance)
|
||||
@@ -242,6 +243,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.task_always_email
|
||||
always_text = alert_template.task_always_text
|
||||
alert_interval = alert_template.task_periodic_alert_days
|
||||
run_script_action = alert_template.task_script_actions
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_task_alert(instance)
|
||||
@@ -295,7 +297,7 @@ class Alert(models.Model):
|
||||
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
|
||||
|
||||
# check if any scripts should be run
|
||||
if alert_template and alert_template.action and not alert.action_run:
|
||||
if alert_template and alert_template.action and run_script_action and not alert.action_run: # type: ignore
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.action.pk,
|
||||
args=alert.parse_script_args(alert_template.action_args),
|
||||
@@ -314,8 +316,10 @@ class Alert(models.Model):
|
||||
alert.action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname} failure alert"
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="scripting",
|
||||
message=f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) failure alert",
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@@ -345,6 +349,7 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.agent_email_on_resolved
|
||||
text_on_resolved = alert_template.agent_text_on_resolved
|
||||
run_script_action = alert_template.agent_script_actions
|
||||
|
||||
elif isinstance(instance, Check):
|
||||
from checks.tasks import (
|
||||
@@ -363,6 +368,7 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.check_email_on_resolved
|
||||
text_on_resolved = alert_template.check_text_on_resolved
|
||||
run_script_action = alert_template.check_script_actions
|
||||
|
||||
elif isinstance(instance, AutomatedTask):
|
||||
from autotasks.tasks import (
|
||||
@@ -381,6 +387,7 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.task_email_on_resolved
|
||||
text_on_resolved = alert_template.task_text_on_resolved
|
||||
run_script_action = alert_template.task_script_actions
|
||||
|
||||
else:
|
||||
return
|
||||
@@ -403,6 +410,7 @@ class Alert(models.Model):
|
||||
if (
|
||||
alert_template
|
||||
and alert_template.resolved_action
|
||||
and run_script_action # type: ignore
|
||||
and not alert.resolved_action_run
|
||||
):
|
||||
r = agent.run_script(
|
||||
@@ -425,8 +433,10 @@ class Alert(models.Model):
|
||||
alert.resolved_action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname} resolved alert"
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="scripting",
|
||||
message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
|
||||
)
|
||||
|
||||
def parse_script_args(self, args: list[str]):
|
||||
@@ -451,7 +461,7 @@ class Alert(models.Model):
|
||||
try:
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg)) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
DebugLog.error(log_type="scripting", message=e)
|
||||
continue
|
||||
|
||||
else:
|
||||
@@ -460,7 +470,7 @@ class Alert(models.Model):
|
||||
return temp_args
|
||||
|
||||
|
||||
class AlertTemplate(models.Model):
|
||||
class AlertTemplate(BaseAuditModel):
|
||||
name = models.CharField(max_length=100)
|
||||
is_active = models.BooleanField(default=True)
|
||||
|
||||
@@ -517,6 +527,7 @@ class AlertTemplate(models.Model):
|
||||
agent_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
agent_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
agent_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
|
||||
# check alert settings
|
||||
check_email_alert_severity = ArrayField(
|
||||
@@ -540,6 +551,7 @@ class AlertTemplate(models.Model):
|
||||
check_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
check_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
check_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
|
||||
# task alert settings
|
||||
task_email_alert_severity = ArrayField(
|
||||
@@ -563,6 +575,7 @@ class AlertTemplate(models.Model):
|
||||
task_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
task_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
task_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
|
||||
# exclusion settings
|
||||
exclude_workstations = BooleanField(null=True, blank=True, default=False)
|
||||
@@ -581,6 +594,13 @@ class AlertTemplate(models.Model):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@staticmethod
|
||||
def serialize(alert_template):
|
||||
# serializes the alert template and returns json
|
||||
from .serializers import AlertTemplateAuditSerializer
|
||||
|
||||
return AlertTemplateAuditSerializer(alert_template).data
|
||||
|
||||
@property
|
||||
def has_agent_settings(self) -> bool:
|
||||
return (
|
||||
|
||||
@@ -119,3 +119,9 @@ class AlertTemplateRelationSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = AlertTemplate
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class AlertTemplateAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = AlertTemplate
|
||||
fields = "__all__"
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from alerts.models import Alert
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
|
||||
@app.task
|
||||
def unsnooze_alerts() -> str:
|
||||
from .models import Alert
|
||||
|
||||
Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update(
|
||||
snoozed=False, snooze_until=None
|
||||
@@ -22,3 +21,14 @@ def cache_agents_alert_template():
|
||||
agent.set_alert_template()
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
def prune_resolved_alerts(older_than_days: int) -> str:
    from .models import Alert

    Alert.objects.filter(resolved=True).filter(
        alert_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
    ).delete()

    return "ok"
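How often prune_resolved_alerts (and the similar prune tasks added elsewhere in this set) runs is not shown in these hunks; a hypothetical celery beat entry, with the task path, schedule, and retention value chosen only for illustration, might look like:

# hypothetical beat schedule; the real scheduling/retention settings are not part of this diff
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "prune-resolved-alerts": {
        "task": "alerts.tasks.prune_resolved_alerts",
        "schedule": crontab(hour=4, minute=0),  # once a day at 04:00
        "args": (30,),  # keep 30 days of resolved alerts
    },
}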
@@ -1,14 +1,13 @@
|
||||
from datetime import datetime, timedelta
|
||||
from unittest.mock import patch
|
||||
|
||||
from core.models import CoreSettings
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker, seq
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from autotasks.models import AutomatedTask
|
||||
from core.models import CoreSettings
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import Alert, AlertTemplate
|
||||
from .serializers import (
|
||||
@@ -330,8 +329,8 @@ class TestAlertsViews(TacticalTestCase):
|
||||
baker.make("clients.Site", alert_template=alert_template, _quantity=3)
|
||||
baker.make("automation.Policy", alert_template=alert_template)
|
||||
core = CoreSettings.objects.first()
|
||||
core.alert_template = alert_template
|
||||
core.save()
|
||||
core.alert_template = alert_template # type: ignore
|
||||
core.save() # type: ignore
|
||||
|
||||
url = f"/alerts/alerttemplates/{alert_template.pk}/related/" # type: ignore
|
||||
|
||||
@@ -403,16 +402,16 @@ class TestAlertTasks(TacticalTestCase):
|
||||
# assign first Alert Template to a policy and apply it as default
|
||||
policy.alert_template = alert_templates[0] # type: ignore
|
||||
policy.save() # type: ignore
|
||||
core.workstation_policy = policy
|
||||
core.server_policy = policy
|
||||
core.save()
|
||||
core.workstation_policy = policy # type: ignore
|
||||
core.server_policy = policy # type: ignore
|
||||
core.save() # type: ignore
|
||||
|
||||
self.assertEquals(server.set_alert_template().pk, alert_templates[0].pk) # type: ignore
|
||||
self.assertEquals(workstation.set_alert_template().pk, alert_templates[0].pk) # type: ignore
|
||||
|
||||
# assign second Alert Template as the default alert template
|
||||
core.alert_template = alert_templates[1] # type: ignore
|
||||
core.save()
|
||||
core.save() # type: ignore
|
||||
|
||||
self.assertEquals(workstation.set_alert_template().pk, alert_templates[1].pk) # type: ignore
|
||||
self.assertEquals(server.set_alert_template().pk, alert_templates[1].pk) # type: ignore
|
||||
@@ -514,6 +513,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
agent_recovery_email_task,
|
||||
agent_recovery_sms_task,
|
||||
)
|
||||
|
||||
from alerts.models import Alert
|
||||
|
||||
agent_dashboard_alert = baker.make_recipe("agents.overdue_agent")
|
||||
@@ -727,7 +727,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
send_email,
|
||||
sleep,
|
||||
):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from checks.models import Check
|
||||
from checks.tasks import (
|
||||
handle_check_email_alert_task,
|
||||
@@ -736,6 +735,8 @@ class TestAlertTasks(TacticalTestCase):
|
||||
handle_resolved_check_sms_alert_task,
|
||||
)
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
|
||||
# create test data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent_no_settings = baker.make_recipe("agents.agent")
|
||||
@@ -1011,7 +1012,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
send_email,
|
||||
sleep,
|
||||
):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.tasks import (
|
||||
handle_resolved_task_email_alert,
|
||||
@@ -1020,6 +1020,8 @@ class TestAlertTasks(TacticalTestCase):
|
||||
handle_task_sms_alert,
|
||||
)
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
|
||||
# create test data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent_no_settings = baker.make_recipe("agents.agent")
|
||||
@@ -1272,17 +1274,17 @@ class TestAlertTasks(TacticalTestCase):
|
||||
)
|
||||
|
||||
core = CoreSettings.objects.first()
|
||||
core.smtp_host = "test.test.com"
|
||||
core.smtp_port = 587
|
||||
core.smtp_recipients = ["recipient@test.com"]
|
||||
core.twilio_account_sid = "test"
|
||||
core.twilio_auth_token = "1234123412341234"
|
||||
core.sms_alert_recipients = ["+1234567890"]
|
||||
core.smtp_host = "test.test.com" # type: ignore
|
||||
core.smtp_port = 587 # type: ignore
|
||||
core.smtp_recipients = ["recipient@test.com"] # type: ignore
|
||||
core.twilio_account_sid = "test" # type: ignore
|
||||
core.twilio_auth_token = "1234123412341234" # type: ignore
|
||||
core.sms_alert_recipients = ["+1234567890"] # type: ignore
|
||||
|
||||
# test sending email with alert template settings
|
||||
core.send_mail("Test", "Test", alert_template=alert_template)
|
||||
core.send_mail("Test", "Test", alert_template=alert_template) # type: ignore
|
||||
|
||||
core.send_sms("Test", alert_template=alert_template)
|
||||
core.send_sms("Test", alert_template=alert_template) # type: ignore
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
@patch("agents.tasks.agent_outage_sms_task.delay")
|
||||
@@ -1315,6 +1317,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
"alerts.AlertTemplate",
|
||||
is_active=True,
|
||||
agent_always_alert=True,
|
||||
agent_script_actions=False,
|
||||
action=failure_action,
|
||||
action_timeout=30,
|
||||
resolved_action=resolved_action,
|
||||
@@ -1328,6 +1331,14 @@ class TestAlertTasks(TacticalTestCase):
|
||||
|
||||
agent_outages_task()
|
||||
|
||||
# should not have been called since agent_script_actions is set to False
|
||||
nats_cmd.assert_not_called()
|
||||
|
||||
alert_template.agent_script_actions = True # type: ignore
|
||||
alert_template.save() # type: ignore
|
||||
|
||||
agent_outages_task()
|
||||
|
||||
# this is what data should be
|
||||
data = {
|
||||
"func": "runscriptfull",
|
||||
@@ -1340,14 +1351,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
# Setup cmd mock
|
||||
success = {
|
||||
"retcode": 0,
|
||||
"stdout": "success!",
|
||||
"stderr": "",
|
||||
"execution_time": 5.0000,
|
||||
}
|
||||
|
||||
nats_cmd.side_effect = ["pong", success]
|
||||
|
||||
# make sure script run results were stored
|
||||
@@ -1398,3 +1401,36 @@ class TestAlertTasks(TacticalTestCase):
|
||||
["-Parameter", f"-Another '{alert.id}'"], # type: ignore
|
||||
alert.parse_script_args(args=args), # type: ignore
|
||||
)
|
||||
|
||||
def test_prune_resolved_alerts(self):
|
||||
from .tasks import prune_resolved_alerts
|
||||
|
||||
# setup data
|
||||
resolved_alerts = baker.make(
|
||||
"alerts.Alert",
|
||||
resolved=True,
|
||||
_quantity=25,
|
||||
)
|
||||
|
||||
alerts = baker.make(
|
||||
"alerts.Alert",
|
||||
resolved=False,
|
||||
_quantity=25,
|
||||
)
|
||||
|
||||
days = 0
|
||||
for alert in resolved_alerts: # type: ignore
|
||||
alert.alert_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
alert.save()
|
||||
days = days + 5
|
||||
|
||||
days = 0
|
||||
for alert in alerts: # type: ignore
|
||||
alert.alert_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
alert.save()
|
||||
days = days + 5
|
||||
|
||||
# delete resolved Alerts older than 30 days
|
||||
prune_resolved_alerts(30)
|
||||
|
||||
self.assertEqual(Alert.objects.count(), 31)
|
||||
|
||||
@@ -20,4 +20,5 @@ urlpatterns = [
|
||||
path("superseded/", views.SupersededWinUpdate.as_view()),
|
||||
path("<int:pk>/chocoresult/", views.ChocoResult.as_view()),
|
||||
path("<str:agentid>/recovery/", views.AgentRecovery.as_view()),
|
||||
path("<int:pk>/<str:agentid>/histresult/", views.AgentHistoryResult.as_view()),
|
||||
]
|
||||
|
||||
@@ -6,7 +6,6 @@ from django.conf import settings
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.authtoken.models import Token
|
||||
@@ -15,20 +14,18 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.models import User
|
||||
from agents.models import Agent, AgentCustomField
|
||||
from agents.serializers import WinAgentSerializer
|
||||
from agents.models import Agent, AgentHistory
|
||||
from agents.serializers import WinAgentSerializer, AgentHistorySerializer
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
|
||||
from checks.models import Check
|
||||
from checks.serializers import CheckRunnerGetSerializer
|
||||
from checks.utils import bytes2human
|
||||
from logs.models import PendingAction
|
||||
from logs.models import PendingAction, DebugLog
|
||||
from software.models import InstalledSoftware
|
||||
from tacticalrmm.utils import SoftwareList, filter_software, notify_error, reload_nats
|
||||
from winupdate.models import WinUpdate, WinUpdatePolicy
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class CheckIn(APIView):
|
||||
|
||||
@@ -36,6 +33,10 @@ class CheckIn(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def patch(self, request):
|
||||
"""
|
||||
!!! DEPRECATED AS OF AGENT 1.6.0 !!!
|
||||
Endpoint will be removed in a future release
|
||||
"""
|
||||
from alerts.models import Alert
|
||||
|
||||
updated = False
|
||||
@@ -182,7 +183,11 @@ class WinUpdates(APIView):
|
||||
|
||||
if reboot:
|
||||
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
|
||||
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="windows_updates",
|
||||
message=f"{agent.hostname} is rebooting after updates were installed.",
|
||||
)
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
@@ -350,13 +355,12 @@ class TaskRunner(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
_ = get_object_or_404(Agent, agent_id=agentid)
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
return Response(TaskGOGetSerializer(task).data)
|
||||
|
||||
def patch(self, request, pk, agentid):
|
||||
from alerts.models import Alert
|
||||
from logs.models import AuditLog
|
||||
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
@@ -371,38 +375,7 @@ class TaskRunner(APIView):
|
||||
if task.custom_field:
|
||||
if not task.stderr:
|
||||
|
||||
if AgentCustomField.objects.filter(
|
||||
field=task.custom_field, agent=task.agent
|
||||
).exists():
|
||||
agent_field = AgentCustomField.objects.get(
|
||||
field=task.custom_field, agent=task.agent
|
||||
)
|
||||
else:
|
||||
agent_field = AgentCustomField.objects.create(
|
||||
field=task.custom_field, agent=task.agent
|
||||
)
|
||||
|
||||
# get last line of stdout
|
||||
value = (
|
||||
new_task.stdout
|
||||
if task.collector_all_output
|
||||
else new_task.stdout.split("\n")[-1].strip()
|
||||
)
|
||||
|
||||
if task.custom_field.type in [
|
||||
"text",
|
||||
"number",
|
||||
"single",
|
||||
"datetime",
|
||||
]:
|
||||
agent_field.string_value = value
|
||||
agent_field.save()
|
||||
elif task.custom_field.type == "multiple":
|
||||
agent_field.multiple_value = value.split(",")
|
||||
agent_field.save()
|
||||
elif task.custom_field.type == "checkbox":
|
||||
agent_field.bool_value = bool(value)
|
||||
agent_field.save()
|
||||
task.save_collector_results()
|
||||
|
||||
status = "passing"
|
||||
else:
|
||||
@@ -419,15 +392,6 @@ class TaskRunner(APIView):
|
||||
else:
|
||||
Alert.handle_alert_failure(new_task)
|
||||
|
||||
AuditLog.objects.create(
|
||||
username=agent.hostname,
|
||||
agent=agent.hostname,
|
||||
object_type="agent",
|
||||
action="task_run",
|
||||
message=f"Scheduled Task {task.name} was run on {agent.hostname}",
|
||||
after_value=AutomatedTask.serialize(new_task),
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -518,6 +482,7 @@ class NewAgent(APIView):
|
||||
action="agent_install",
|
||||
message=f"{request.user} installed new agent {agent.hostname}",
|
||||
after_value=Agent.serialize(agent),
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
return Response(
|
||||
@@ -622,3 +587,16 @@ class AgentRecovery(APIView):
|
||||
reload_nats()
|
||||
|
||||
return Response(ret)
|
||||
|
||||
|
||||
class AgentHistoryResult(APIView):
    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def patch(self, request, agentid, pk):
        _ = get_object_or_404(Agent, agent_id=agentid)
        hist = get_object_or_404(AgentHistory, pk=pk)
        s = AgentHistorySerializer(instance=hist, data=request.data, partial=True)
        s.is_valid(raise_exception=True)
        s.save()
        return Response("ok")
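AgentHistoryResult accepts a partial update from the agent token; the accepted payload keys come from AgentHistorySerializer and are not visible in this diff, so the request below is a hypothetical sketch only (base URL prefix, token variable, and the "results" key are assumptions):

# hypothetical agent-side call against the histresult route added above
import requests

requests.patch(
    f"{api_base}/api/v3/{hist_pk}/{agent_id}/histresult/",
    json={"results": "script finished with retcode 0"},  # field name is a guess
    headers={"Authorization": f"Token {agent_token}"},
    timeout=15,
)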
@@ -33,7 +33,7 @@ class Policy(BaseAuditModel):
|
||||
|
||||
# get old policy if exists
|
||||
old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
super(Policy, self).save(old_model=old_policy, *args, **kwargs)
|
||||
|
||||
# generate agent checks only if active and enforced were changed
|
||||
if old_policy:
|
||||
@@ -50,7 +50,7 @@ class Policy(BaseAuditModel):
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
agents = list(self.related_agents().only("pk").values_list("pk", flat=True))
|
||||
super(BaseAuditModel, self).delete(*args, **kwargs)
|
||||
super(Policy, self).delete(*args, **kwargs)
|
||||
|
||||
generate_agent_checks_task.delay(agents=agents, create_tasks=True)
|
||||
|
||||
@@ -126,9 +126,9 @@ class Policy(BaseAuditModel):
|
||||
@staticmethod
|
||||
def serialize(policy):
|
||||
# serializes the policy and returns json
|
||||
from .serializers import PolicySerializer
|
||||
from .serializers import PolicyAuditSerializer
|
||||
|
||||
return PolicySerializer(policy).data
|
||||
return PolicyAuditSerializer(policy).data
|
||||
|
||||
@staticmethod
|
||||
def cascade_policy_tasks(agent):
|
||||
|
||||
@@ -83,8 +83,15 @@ class PolicyCheckSerializer(ModelSerializer):
|
||||
class AutoTasksFieldSerializer(ModelSerializer):
|
||||
assigned_check = PolicyCheckSerializer(read_only=True)
|
||||
script = ReadOnlyField(source="script.id")
|
||||
custom_field = ReadOnlyField(source="custom_field.id")
|
||||
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
fields = "__all__"
|
||||
depth = 1
|
||||
|
||||
|
||||
class PolicyAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Policy
|
||||
fields = "__all__"
|
||||
|
||||
@@ -6,19 +6,15 @@ from typing import List
|
||||
|
||||
import pytz
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.db.models.fields import DateTimeField
|
||||
from django.db.utils import DatabaseError
|
||||
from django.utils import timezone as djangotime
|
||||
from logs.models import BaseAuditModel
|
||||
from loguru import logger
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
from packaging import version as pyver
|
||||
from tacticalrmm.utils import bitdays_to_string
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
RUN_TIME_DAY_CHOICES = [
|
||||
(0, "Monday"),
|
||||
(1, "Tuesday"),
|
||||
@@ -195,9 +191,9 @@ class AutomatedTask(BaseAuditModel):
|
||||
@staticmethod
|
||||
def serialize(task):
|
||||
# serializes the task and returns json
|
||||
from .serializers import TaskSerializer
|
||||
from .serializers import TaskAuditSerializer
|
||||
|
||||
return TaskSerializer(task).data
|
||||
return TaskAuditSerializer(task).data
|
||||
|
||||
def create_policy_task(self, agent=None, policy=None, assigned_check=None):
|
||||
|
||||
@@ -254,7 +250,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
|
||||
elif self.task_type == "runonce":
|
||||
# check if scheduled time is in the past
|
||||
agent_tz = pytz.timezone(agent.timezone)
|
||||
agent_tz = pytz.timezone(agent.timezone) # type: ignore
|
||||
task_time_utc = self.run_time_date.replace(tzinfo=agent_tz).astimezone(
|
||||
pytz.utc
|
||||
)
|
||||
@@ -280,7 +276,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
},
|
||||
}
|
||||
|
||||
if self.run_asap_after_missed and pyver.parse(agent.version) >= pyver.parse(
|
||||
if self.run_asap_after_missed and pyver.parse(agent.version) >= pyver.parse( # type: ignore
|
||||
"1.4.7"
|
||||
):
|
||||
nats_data["schedtaskpayload"]["run_asap_after_missed"] = True
|
||||
@@ -301,19 +297,25 @@ class AutomatedTask(BaseAuditModel):
|
||||
else:
|
||||
return "error"
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5)) # type: ignore
|
||||
|
||||
if r != "ok":
|
||||
self.sync_status = "initial"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in."
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in.", # type: ignore
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.sync_status = "synced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.info(f"{agent.hostname} task {self.name} was successfully created")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} task {self.name} was successfully created", # type: ignore
|
||||
)
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -333,19 +335,25 @@ class AutomatedTask(BaseAuditModel):
|
||||
"enabled": self.enabled,
|
||||
},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5)) # type: ignore
|
||||
|
||||
if r != "ok":
|
||||
self.sync_status = "notsynced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"Unable to modify scheduled task {self.name} on {agent.hostname}. It will try again on next agent checkin"
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to modify scheduled task {self.name} on {agent.hostname}({agent.pk}). It will try again on next agent checkin", # type: ignore
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.sync_status = "synced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.info(f"{agent.hostname} task {self.name} was successfully modified")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} task {self.name} was successfully modified", # type: ignore
|
||||
)
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -362,7 +370,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": self.win_task_name},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10)) # type: ignore
|
||||
|
||||
if r != "ok" and "The system cannot find the file specified" not in r:
|
||||
self.sync_status = "pendingdeletion"
|
||||
@@ -372,13 +380,19 @@ class AutomatedTask(BaseAuditModel):
|
||||
except DatabaseError:
|
||||
pass
|
||||
|
||||
logger.warning(
|
||||
f"{agent.hostname} task {self.name} will be deleted on next checkin"
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} task {self.name} will be deleted on next checkin", # type: ignore
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.delete()
|
||||
logger.info(f"{agent.hostname} task {self.name} was deleted")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname}({agent.pk}) task {self.name} was deleted", # type: ignore
|
||||
)
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -391,9 +405,20 @@ class AutomatedTask(BaseAuditModel):
|
||||
.first()
|
||||
)
|
||||
|
||||
asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False))
|
||||
asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False)) # type: ignore
|
||||
return "ok"
|
||||
|
||||
def save_collector_results(self):
|
||||
|
||||
agent_field = self.custom_field.get_or_create_field_value(self.agent)
|
||||
|
||||
value = (
|
||||
self.stdout.strip()
|
||||
if self.collector_all_output
|
||||
else self.stdout.strip().split("\n")[-1].strip()
|
||||
)
|
||||
agent_field.save_to_field(value)
|
||||
|
||||
def should_create_alert(self, alert_template=None):
|
||||
return (
|
||||
self.dashboard_alert
|
||||
@@ -413,9 +438,9 @@ class AutomatedTask(BaseAuditModel):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
|
||||
# Format of Email sent when Task has email alert
|
||||
if self.agent:
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - {self} Failed"
|
||||
else:
|
||||
subject = f"{self} Failed"
|
||||
|
||||
@@ -424,16 +449,15 @@ class AutomatedTask(BaseAuditModel):
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_mail(subject, body, self.agent.alert_template)
|
||||
CORE.send_mail(subject, body, self.agent.alert_template) # type: ignore
|
||||
|
||||
def send_sms(self):
|
||||
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
|
||||
# Format of SMS sent when Task has SMS alert
|
||||
if self.agent:
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - {self} Failed"
|
||||
else:
|
||||
subject = f"{self} Failed"
|
||||
|
||||
@@ -442,7 +466,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template)
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template) # type: ignore
|
||||
|
||||
def send_resolved_email(self):
|
||||
from core.models import CoreSettings
|
||||
@@ -454,7 +478,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_mail(subject, body, alert_template=self.agent.alert_template)
|
||||
CORE.send_mail(subject, body, alert_template=self.agent.alert_template) # type: ignore
|
||||
|
||||
def send_resolved_sms(self):
|
||||
from core.models import CoreSettings
|
||||
@@ -465,4 +489,4 @@ class AutomatedTask(BaseAuditModel):
|
||||
subject
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template)
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template) # type: ignore
|
||||
|
||||
@@ -84,3 +84,9 @@ class TaskRunnerPatchSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class TaskAuditSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
fields = "__all__"
|
||||
|
||||
@@ -1,18 +1,16 @@
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
from logging import log
|
||||
import random
|
||||
from time import sleep
|
||||
from typing import Union
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
|
||||
from autotasks.models import AutomatedTask
|
||||
from logs.models import DebugLog
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@app.task
|
||||
def create_win_task_schedule(pk):
|
||||
@@ -53,12 +51,20 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
|
||||
agent = Agent.objects.get(pk=agentpk)
|
||||
|
||||
logger.info(f"Orphaned task cleanup initiated on {agent.hostname}.")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Orphaned task cleanup initiated on {agent.hostname}.",
|
||||
)
|
||||
|
||||
r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10))
|
||||
|
||||
if not isinstance(r, list) and not r: # empty list
|
||||
logger.error(f"Unable to clean up scheduled tasks on {agent.hostname}: {r}")
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to clean up scheduled tasks on {agent.hostname}: {r}",
|
||||
)
|
||||
return "notlist"
|
||||
|
||||
agent_task_names = list(agent.autotasks.values_list("win_task_name", flat=True))
|
||||
@@ -83,13 +89,23 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
}
|
||||
ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
if ret != "ok":
|
||||
logger.error(
|
||||
f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}"
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}",
|
||||
)
|
||||
else:
|
||||
logger.info(f"Removed orphaned task {task} from {agent.hostname}")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Removed orphaned task {task} from {agent.hostname}",
|
||||
)
|
||||
|
||||
logger.info(f"Orphaned task cleanup finished on {agent.hostname}")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Orphaned task cleanup finished on {agent.hostname}",
|
||||
)
|
||||
|
||||
|
||||
@app.task
|
||||
|
||||
@@ -12,10 +12,6 @@ from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
from django.db import models
|
||||
from logs.models import BaseAuditModel
|
||||
from loguru import logger
|
||||
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
CHECK_TYPE_CHOICES = [
|
||||
("diskspace", "Disk Space Check"),
|
||||
@@ -475,9 +471,9 @@ class Check(BaseAuditModel):
|
||||
@staticmethod
|
||||
def serialize(check):
|
||||
# serializes the check and returns json
|
||||
from .serializers import CheckSerializer
|
||||
from .serializers import CheckAuditSerializer
|
||||
|
||||
return CheckSerializer(check).data
|
||||
return CheckAuditSerializer(check).data
|
||||
|
||||
# for policy diskchecks
|
||||
@staticmethod
|
||||
|
||||
@@ -6,6 +6,7 @@ from autotasks.models import AutomatedTask
|
||||
from scripts.serializers import ScriptCheckSerializer, ScriptSerializer
|
||||
|
||||
from .models import Check, CheckHistory
|
||||
from scripts.models import Script
|
||||
|
||||
|
||||
class AssignedTaskField(serializers.ModelSerializer):
|
||||
@@ -159,6 +160,15 @@ class AssignedTaskCheckRunnerField(serializers.ModelSerializer):
|
||||
class CheckRunnerGetSerializer(serializers.ModelSerializer):
|
||||
# only send data needed for agent to run a check
|
||||
script = ScriptCheckSerializer(read_only=True)
|
||||
script_args = serializers.SerializerMethodField()
|
||||
|
||||
def get_script_args(self, obj):
|
||||
if obj.check_type != "script":
|
||||
return []
|
||||
|
||||
return Script.parse_script_args(
|
||||
agent=obj.agent, shell=obj.script.shell, args=obj.script_args
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Check
|
||||
@@ -210,3 +220,9 @@ class CheckHistorySerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = CheckHistory
|
||||
fields = ("x", "y", "results")
|
||||
|
||||
|
||||
class CheckAuditSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Check
|
||||
fields = "__all__"
|
||||
|
||||
@@ -33,13 +33,17 @@ class Client(BaseAuditModel):
|
||||
blank=True,
|
||||
)
|
||||
|
||||
def save(self, *args, **kw):
|
||||
def save(self, *args, **kwargs):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# get old client if exists
|
||||
old_client = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kw)
|
||||
old_client = Client.objects.get(pk=self.pk) if self.pk else None
|
||||
super(Client, self).save(
|
||||
old_model=old_client,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
# check if policies have changed and initiate a task to reapply policies if so
|
||||
if old_client:
|
||||
@@ -50,7 +54,6 @@ class Client(BaseAuditModel):
|
||||
old_client.block_policy_inheritance != self.block_policy_inheritance
|
||||
)
|
||||
):
|
||||
|
||||
generate_agent_checks_task.delay(
|
||||
client=self.pk,
|
||||
create_tasks=True,
|
||||
@@ -87,12 +90,20 @@ class Client(BaseAuditModel):
|
||||
"offline_time",
|
||||
)
|
||||
.filter(site__client=self)
|
||||
.prefetch_related("agentchecks")
|
||||
.prefetch_related("agentchecks", "autotasks")
|
||||
)
|
||||
|
||||
data = {"error": False, "warning": False}
|
||||
|
||||
for agent in agents:
|
||||
if agent.maintenance_mode:
|
||||
break
|
||||
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
if agent.checks["has_failing_checks"]:
|
||||
|
||||
if agent.checks["warning"]:
|
||||
@@ -102,19 +113,20 @@ class Client(BaseAuditModel):
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
data["error"] = True
|
||||
break
|
||||
if agent.autotasks.exists(): # type: ignore
|
||||
for i in agent.autotasks.all(): # type: ignore
|
||||
if i.status == "failing" and i.alert_severity == "error":
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def serialize(client):
|
||||
# serializes the client and returns json
|
||||
from .serializers import ClientSerializer
|
||||
from .serializers import ClientAuditSerializer
|
||||
|
||||
return ClientSerializer(client).data
|
||||
# serializes the client and returns json
|
||||
return ClientAuditSerializer(client).data
|
||||
|
||||
|
||||
class Site(BaseAuditModel):
|
||||
@@ -144,13 +156,17 @@ class Site(BaseAuditModel):
|
||||
blank=True,
|
||||
)
|
||||
|
||||
def save(self, *args, **kw):
|
||||
def save(self, *args, **kwargs):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# get old client if exists
|
||||
old_site = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(Site, self).save(*args, **kw)
|
||||
old_site = Site.objects.get(pk=self.pk) if self.pk else None
|
||||
super(Site, self).save(
|
||||
old_model=old_site,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
# check if policies have changed and initiate a task to reapply policies if so
|
||||
if old_site:
|
||||
@@ -159,11 +175,10 @@ class Site(BaseAuditModel):
|
||||
or (old_site.workstation_policy != self.workstation_policy)
|
||||
or (old_site.block_policy_inheritance != self.block_policy_inheritance)
|
||||
):
|
||||
|
||||
generate_agent_checks_task.delay(site=self.pk, create_tasks=True)
|
||||
|
||||
if old_site.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
if old_site.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
|
||||
class Meta:
|
||||
ordering = ("name",)
|
||||
@@ -192,12 +207,19 @@ class Site(BaseAuditModel):
|
||||
"offline_time",
|
||||
)
|
||||
.filter(site=self)
|
||||
.prefetch_related("agentchecks")
|
||||
.prefetch_related("agentchecks", "autotasks")
|
||||
)
|
||||
|
||||
data = {"error": False, "warning": False}
|
||||
|
||||
for agent in agents:
|
||||
if agent.maintenance_mode:
|
||||
break
|
||||
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
if agent.checks["has_failing_checks"]:
|
||||
if agent.checks["warning"]:
|
||||
@@ -207,19 +229,20 @@ class Site(BaseAuditModel):
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
data["error"] = True
|
||||
break
|
||||
if agent.autotasks.exists(): # type: ignore
|
||||
for i in agent.autotasks.all(): # type: ignore
|
||||
if i.status == "failing" and i.alert_severity == "error":
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def serialize(site):
|
||||
# serializes the site and returns json
|
||||
from .serializers import SiteSerializer
|
||||
from .serializers import SiteAuditSerializer
|
||||
|
||||
return SiteSerializer(site).data
|
||||
# serializes the site and returns json
|
||||
return SiteAuditSerializer(site).data
|
||||
|
||||
|
||||
MON_TYPE_CHOICES = [
|
||||
@@ -291,6 +314,22 @@ class ClientCustomField(models.Model):
|
||||
else:
|
||||
return self.string_value
|
||||
|
||||
def save_to_field(self, value):
|
||||
if self.field.type in [
|
||||
"text",
|
||||
"number",
|
||||
"single",
|
||||
"datetime",
|
||||
]:
|
||||
self.string_value = value
|
||||
self.save()
|
||||
elif self.field.type == "multiple":
|
||||
self.multiple_value = value.split(",")
|
||||
self.save()
|
||||
elif self.field.type == "checkbox":
|
||||
self.bool_value = bool(value)
|
||||
self.save()
|
||||
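A short usage sketch of the new save_to_field helper; the helper function, field pk and submitted value below are illustrative, and get_or_create_field_value comes from core/models.py further down in this diff.

from core.models import CustomField

def set_client_custom_field(client, field_pk, raw_value):
    # Hypothetical helper: resolve the per-client value row and store the value.
    field = CustomField.objects.get(pk=field_pk)
    row = field.get_or_create_field_value(client)  # returns a ClientCustomField
    # save_to_field() writes to string_value, multiple_value or bool_value
    # depending on field.type, then saves the row.
    row.save_to_field(raw_value)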
|
||||
|
||||
class SiteCustomField(models.Model):
|
||||
site = models.ForeignKey(
|
||||
@@ -325,3 +364,19 @@ class SiteCustomField(models.Model):
|
||||
return self.bool_value
|
||||
else:
|
||||
return self.string_value
|
||||
|
||||
def save_to_field(self, value):
|
||||
if self.field.type in [
|
||||
"text",
|
||||
"number",
|
||||
"single",
|
||||
"datetime",
|
||||
]:
|
||||
self.string_value = value
|
||||
self.save()
|
||||
elif self.field.type == "multiple":
|
||||
self.multiple_value = value.split(",")
|
||||
self.save()
|
||||
elif self.field.type == "checkbox":
|
||||
self.bool_value = bool(value)
|
||||
self.save()
|
||||
|
||||
@@ -1,4 +1,10 @@
|
||||
from rest_framework.serializers import ModelSerializer, ReadOnlyField, ValidationError
|
||||
from django.db.models.base import Model
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
ReadOnlyField,
|
||||
Serializer,
|
||||
ValidationError,
|
||||
)
|
||||
|
||||
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
|
||||
|
||||
@@ -134,3 +140,15 @@ class DeploymentSerializer(ModelSerializer):
|
||||
"install_flags",
|
||||
"created",
|
||||
]
|
||||
|
||||
|
||||
class SiteAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Site
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class ClientAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Client
|
||||
fields = "__all__"
|
||||
|
||||
@@ -3,10 +3,8 @@ import re
|
||||
import uuid
|
||||
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
@@ -26,8 +24,6 @@ from .serializers import (
|
||||
SiteSerializer,
|
||||
)
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class GetAddClients(APIView):
|
||||
permission_classes = [IsAuthenticated, ManageClientsPerms]
|
||||
@@ -251,16 +247,19 @@ class AgentDeployment(APIView):
|
||||
|
||||
def post(self, request):
|
||||
from knox.models import AuthToken
|
||||
from accounts.models import User
|
||||
|
||||
client = get_object_or_404(Client, pk=request.data["client"])
|
||||
site = get_object_or_404(Site, pk=request.data["site"])
|
||||
|
||||
installer_user = User.objects.filter(is_installer_user=True).first()
|
||||
|
||||
expires = dt.datetime.strptime(
|
||||
request.data["expires"], "%Y-%m-%d %H:%M"
|
||||
).astimezone(pytz.timezone("UTC"))
|
||||
now = djangotime.now()
|
||||
delta = expires - now
|
||||
obj, token = AuthToken.objects.create(user=request.user, expiry=delta)
|
||||
obj, token = AuthToken.objects.create(user=installer_user, expiry=delta)
|
||||
|
||||
flags = {
|
||||
"power": request.data["power"],
|
||||
|
||||
@@ -53,9 +53,9 @@ If (Get-Service $serviceName -ErrorAction SilentlyContinue) {
|
||||
Write-Output "Waiting for network"
|
||||
Start-Sleep -s 5
|
||||
$X += 1
|
||||
} until(($connectreult = Test-NetConnection $apilink[2] -Port 443 | ? { $_.TcpTestSucceeded }) -or $X -eq 3)
|
||||
} until(($connectresult = Test-NetConnection $apilink[2] -Port 443 | ? { $_.TcpTestSucceeded }) -or $X -eq 3)
|
||||
|
||||
if ($connectreult.TcpTestSucceeded -eq $true){
|
||||
if ($connectresult.TcpTestSucceeded -eq $true){
|
||||
Try
|
||||
{
|
||||
Invoke-WebRequest -Uri $downloadlink -OutFile $OutPath\$output
|
||||
|
||||
23
api/tacticalrmm/core/migrations/0024_auto_20210707_1828.py
Normal file
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-07 18:28
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0023_coresettings_clear_faults_days'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='agent_history_prune_days',
|
||||
field=models.PositiveIntegerField(default=30),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='resolved_alerts_prune_days',
|
||||
field=models.PositiveIntegerField(default=0),
|
||||
),
|
||||
]
|
||||
28
api/tacticalrmm/core/migrations/0025_auto_20210707_1835.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-07 18:35
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0024_auto_20210707_1828'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='agent_debug_level',
|
||||
field=models.CharField(choices=[('info', 'Info'), ('warning', 'Warning'), ('error', 'Error'), ('critical', 'Critical')], default='info', max_length=20),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='debug_log_prune_days',
|
||||
field=models.PositiveIntegerField(default=30),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='coresettings',
|
||||
name='agent_history_prune_days',
|
||||
field=models.PositiveIntegerField(default=60),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 17:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0025_auto_20210707_1835'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='audit_log_prune_days',
|
||||
field=models.PositiveIntegerField(default=0),
|
||||
),
|
||||
]
|
||||
@@ -1,17 +1,15 @@
|
||||
import smtplib
|
||||
from email.message import EmailMessage
|
||||
from django.db.models.enums import Choices
|
||||
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import models
|
||||
from loguru import logger
|
||||
from twilio.rest import Client as TwClient
|
||||
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from logs.models import BaseAuditModel, DebugLog, LOG_LEVEL_CHOICES
|
||||
|
||||
TZ_CHOICES = [(_, _) for _ in pytz.all_timezones]
|
||||
|
||||
@@ -51,6 +49,13 @@ class CoreSettings(BaseAuditModel):
|
||||
)
|
||||
# removes check history entries older than this many days
|
||||
check_history_prune_days = models.PositiveIntegerField(default=30)
|
||||
resolved_alerts_prune_days = models.PositiveIntegerField(default=0)
|
||||
agent_history_prune_days = models.PositiveIntegerField(default=60)
|
||||
debug_log_prune_days = models.PositiveIntegerField(default=30)
|
||||
audit_log_prune_days = models.PositiveIntegerField(default=0)
|
||||
agent_debug_level = models.CharField(
|
||||
max_length=20, choices=LOG_LEVEL_CHOICES, default="info"
|
||||
)
|
||||
clear_faults_days = models.IntegerField(default=0)
|
||||
mesh_token = models.CharField(max_length=255, null=True, blank=True, default="")
|
||||
mesh_username = models.CharField(max_length=255, null=True, blank=True, default="")
|
||||
@@ -184,14 +189,14 @@ class CoreSettings(BaseAuditModel):
|
||||
server.quit()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Sending email failed with error: {e}")
|
||||
DebugLog.error(message=f"Sending email failed with error: {e}")
|
||||
if test:
|
||||
return str(e)
|
||||
else:
|
||||
return True
|
||||
|
||||
def send_sms(self, body, alert_template=None):
|
||||
if not alert_template and not self.sms_is_configured:
|
||||
if not alert_template or not self.sms_is_configured:
|
||||
return
|
||||
|
||||
# override sms recipients if alert_template is passed and is set
|
||||
@@ -205,7 +210,7 @@ class CoreSettings(BaseAuditModel):
|
||||
try:
|
||||
tw_client.messages.create(body=body, to=num, from_=self.twilio_number)
|
||||
except Exception as e:
|
||||
logger.error(f"SMS failed to send: {e}")
|
||||
DebugLog.error(message=f"SMS failed to send: {e}")
|
||||
|
||||
@staticmethod
|
||||
def serialize(core):
|
||||
@@ -265,6 +270,26 @@ class CustomField(models.Model):
|
||||
else:
|
||||
return self.default_value_string
|
||||
|
||||
def get_or_create_field_value(self, instance):
|
||||
from agents.models import Agent, AgentCustomField
|
||||
from clients.models import Client, ClientCustomField, Site, SiteCustomField
|
||||
|
||||
if isinstance(instance, Agent):
|
||||
if AgentCustomField.objects.filter(field=self, agent=instance).exists():
|
||||
return AgentCustomField.objects.get(field=self, agent=instance)
|
||||
else:
|
||||
return AgentCustomField.objects.create(field=self, agent=instance)
|
||||
elif isinstance(instance, Client):
|
||||
if ClientCustomField.objects.filter(field=self, client=instance).exists():
|
||||
return ClientCustomField.objects.get(field=self, client=instance)
|
||||
else:
|
||||
return ClientCustomField.objects.create(field=self, client=instance)
|
||||
elif isinstance(instance, Site):
|
||||
if SiteCustomField.objects.filter(field=self, site=instance).exists():
|
||||
return SiteCustomField.objects.get(field=self, site=instance)
|
||||
else:
|
||||
return SiteCustomField.objects.create(field=self, site=instance)
|
||||
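Each branch above is the manual exists()/get()/create() pattern. For comparison, a sketch of the Agent branch written with Django's built-in get_or_create; nothing beyond the fields shown above is assumed.

# Equivalent to the Agent branch: fetch the row if it exists, otherwise create it.
# get_or_create() returns (instance, created); created is discarded here.
value_row, _ = AgentCustomField.objects.get_or_create(field=self, agent=instance)
return value_row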
|
||||
|
||||
class CodeSignToken(models.Model):
|
||||
token = models.CharField(max_length=255, null=True, blank=True)
|
||||
@@ -287,6 +312,9 @@ class GlobalKVStore(models.Model):
|
||||
return self.name
|
||||
|
||||
|
||||
OPEN_ACTIONS = (("window", "New Window"), ("tab", "New Tab"))
|
||||
|
||||
|
||||
class URLAction(models.Model):
|
||||
name = models.CharField(max_length=25)
|
||||
desc = models.CharField(max_length=100, null=True, blank=True)
|
||||
|
||||
@@ -3,6 +3,11 @@ from rest_framework import permissions
|
||||
from tacticalrmm.permissions import _has_perm
|
||||
|
||||
|
||||
class ViewCoreSettingsPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view):
|
||||
return _has_perm(r, "can_view_core_settings")
|
||||
|
||||
|
||||
class EditCoreSettingsPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view):
|
||||
return _has_perm(r, "can_edit_core_settings")
|
||||
|
||||
@@ -1,17 +1,15 @@
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
from checks.tasks import prune_check_history
|
||||
from agents.tasks import clear_faults_task
|
||||
from agents.tasks import clear_faults_task, prune_agent_history
|
||||
from alerts.tasks import prune_resolved_alerts
|
||||
from core.models import CoreSettings
|
||||
from logs.tasks import prune_debug_log, prune_audit_log
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@app.task
|
||||
def core_maintenance_tasks():
|
||||
@@ -32,8 +30,39 @@ def core_maintenance_tasks():
|
||||
core = CoreSettings.objects.first()
|
||||
|
||||
# remove old CheckHistory data
|
||||
if core.check_history_prune_days > 0:
|
||||
prune_check_history.delay(core.check_history_prune_days)
|
||||
if core.check_history_prune_days > 0: # type: ignore
|
||||
prune_check_history.delay(core.check_history_prune_days) # type: ignore
|
||||
|
||||
# remove old resolved alerts
|
||||
if core.resolved_alerts_prune_days > 0: # type: ignore
|
||||
prune_resolved_alerts.delay(core.resolved_alerts_prune_days) # type: ignore
|
||||
|
||||
# remove old agent history
|
||||
if core.agent_history_prune_days > 0: # type: ignore
|
||||
prune_agent_history.delay(core.agent_history_prune_days) # type: ignore
|
||||
|
||||
# remove old debug logs
|
||||
if core.debug_log_prune_days > 0: # type: ignore
|
||||
prune_debug_log.delay(core.debug_log_prune_days) # type: ignore
|
||||
|
||||
# remove old audit logs
|
||||
if core.audit_log_prune_days > 0: # type: ignore
|
||||
prune_audit_log.delay(core.audit_log_prune_days) # type: ignore
|
||||
|
||||
# clear faults
|
||||
if core.clear_faults_days > 0:
|
||||
clear_faults_task.delay(core.clear_faults_days)
|
||||
if core.clear_faults_days > 0: # type: ignore
|
||||
clear_faults_task.delay(core.clear_faults_days) # type: ignore
|
||||
|
||||
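core_maintenance_tasks only has an effect if it runs on a schedule. A minimal celery beat sketch, assuming the task path core.tasks.core_maintenance_tasks; the entry name and hourly interval are illustrative and not taken from this diff.

from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "core-maintenance": {
        "task": "core.tasks.core_maintenance_tasks",
        "schedule": crontab(minute=0),  # hourly, at the top of the hour
    },
}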
|
||||
@app.task
|
||||
def cache_db_fields_task():
|
||||
from agents.models import Agent
|
||||
|
||||
for agent in Agent.objects.all():
|
||||
agent.pending_actions_count = agent.pendingactions.filter(
|
||||
status="pending"
|
||||
).count()
|
||||
agent.has_patches_pending = (
|
||||
agent.winupdates.filter(action="approve").filter(installed=False).exists()
|
||||
)
|
||||
agent.save(update_fields=["pending_actions_count", "has_patches_pending"])
|
||||
|
||||
@@ -18,4 +18,5 @@ urlpatterns = [
|
||||
path("urlaction/", views.GetAddURLAction.as_view()),
|
||||
path("urlaction/<int:pk>/", views.UpdateDeleteURLAction.as_view()),
|
||||
path("urlaction/run/", views.RunURLAction.as_view()),
|
||||
path("smstest/", views.TwilioSMSTest.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import os
|
||||
import pprint
|
||||
import re
|
||||
|
||||
from django.conf import settings
|
||||
@@ -15,7 +16,12 @@ from agents.permissions import MeshPerms
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import CodeSignToken, CoreSettings, CustomField, GlobalKVStore, URLAction
|
||||
from .permissions import CodeSignPerms, EditCoreSettingsPerms, ServerMaintPerms
|
||||
from .permissions import (
|
||||
CodeSignPerms,
|
||||
ViewCoreSettingsPerms,
|
||||
EditCoreSettingsPerms,
|
||||
ServerMaintPerms,
|
||||
)
|
||||
from .serializers import (
|
||||
CodeSignTokenSerializer,
|
||||
CoreSettingsSerializer,
|
||||
@@ -46,6 +52,7 @@ class UploadMeshAgent(APIView):
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([IsAuthenticated, ViewCoreSettingsPerms])
|
||||
def get_core_settings(request):
|
||||
settings = CoreSettings.objects.first()
|
||||
return Response(CoreSettingsSerializer(settings).data)
|
||||
@@ -85,6 +92,7 @@ def dashboard_info(request):
|
||||
"client_tree_sort": request.user.client_tree_sort,
|
||||
"client_tree_splitter": request.user.client_tree_splitter,
|
||||
"loading_bar_color": request.user.loading_bar_color,
|
||||
"clear_search_when_switching": request.user.clear_search_when_switching,
|
||||
"hosted": hasattr(settings, "HOSTED") and settings.HOSTED,
|
||||
}
|
||||
)
|
||||
@@ -338,9 +346,18 @@ class RunURLAction(APIView):
|
||||
from requests.utils import requote_uri
|
||||
|
||||
from agents.models import Agent
|
||||
from clients.models import Client, Site
|
||||
from tacticalrmm.utils import replace_db_values
|
||||
|
||||
agent = get_object_or_404(Agent, pk=request.data["agent"])
|
||||
if "agent" in request.data.keys():
|
||||
instance = get_object_or_404(Agent, pk=request.data["agent"])
|
||||
elif "site" in request.data.keys():
|
||||
instance = get_object_or_404(Site, pk=request.data["site"])
|
||||
elif "client" in request.data.keys():
|
||||
instance = get_object_or_404(Client, pk=request.data["client"])
|
||||
else:
|
||||
return notify_error("received an incorrect request")
|
||||
|
||||
action = get_object_or_404(URLAction, pk=request.data["action"])
|
||||
|
||||
pattern = re.compile("\\{\\{([\\w\\s]+\\.[\\w\\s]+)\\}\\}")
|
||||
@@ -348,8 +365,31 @@ class RunURLAction(APIView):
|
||||
url_pattern = action.pattern
|
||||
|
||||
for string in re.findall(pattern, action.pattern):
|
||||
value = replace_db_values(string=string, agent=agent, quotes=False)
|
||||
value = replace_db_values(string=string, instance=instance, quotes=False)
|
||||
|
||||
url_pattern = re.sub("\\{\\{" + string + "\\}\\}", str(value), url_pattern)
|
||||
|
||||
return Response(requote_uri(url_pattern))
|
||||
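To make the placeholder substitution concrete, a small self-contained example; the URL and the lookup dict standing in for replace_db_values() are illustrative, only the {{model.field}} syntax comes from the regex above.

import re

pattern = re.compile("\\{\\{([\\w\\s]+\\.[\\w\\s]+)\\}\\}")
url = "https://remote.example.com/{{agent.hostname}}?c={{client.name}}"  # hypothetical action pattern

for placeholder in re.findall(pattern, url):
    # replace_db_values() would resolve this against the Agent/Client/Site instance;
    # a fixed dict stands in for it here.
    value = {"agent.hostname": "DESKTOP-01", "client.name": "Acme"}[placeholder]
    url = re.sub("\\{\\{" + placeholder + "\\}\\}", str(value), url)

print(url)  # https://remote.example.com/DESKTOP-01?c=Acme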
|
||||
|
||||
class TwilioSMSTest(APIView):
|
||||
def get(self, request):
|
||||
from twilio.rest import Client as TwClient
|
||||
|
||||
core = CoreSettings.objects.first()
|
||||
if not core.sms_is_configured:
|
||||
return notify_error(
|
||||
"All fields are required, including at least 1 recipient"
|
||||
)
|
||||
|
||||
try:
|
||||
tw_client = TwClient(core.twilio_account_sid, core.twilio_auth_token)
|
||||
tw_client.messages.create(
|
||||
body="TacticalRMM Test SMS",
|
||||
to=core.sms_alert_recipients[0],
|
||||
from_=core.twilio_number,
|
||||
)
|
||||
except Exception as e:
|
||||
return notify_error(pprint.pformat(e))
|
||||
|
||||
return Response("SMS Test OK!")
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import AuditLog, PendingAction
|
||||
from .models import AuditLog, PendingAction, DebugLog
|
||||
|
||||
admin.site.register(PendingAction)
|
||||
admin.site.register(AuditLog)
|
||||
admin.site.register(DebugLog)
|
||||
|
||||
68
api/tacticalrmm/logs/migrations/0013_auto_20210614_1835.py
Normal file
@@ -0,0 +1,68 @@
|
||||
# Generated by Django 3.2.1 on 2021-06-14 18:35
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("logs", "0012_auto_20210228_0943"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="agent",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="debuglogs",
|
||||
to="agents.agent",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="entry_time",
|
||||
field=models.DateTimeField(
|
||||
auto_now_add=True, default=django.utils.timezone.now
|
||||
),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="log_level",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("info", "Info"),
|
||||
("warning", "Warning"),
|
||||
("error", "Error"),
|
||||
("critical", "Critical"),
|
||||
],
|
||||
default="info",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="log_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("agent_update", "Agent Update"),
|
||||
("agent_issues", "Agent Issues"),
|
||||
("win_updates", "Windows Updates"),
|
||||
("system_issues", "System Issues"),
|
||||
("scripting", "Scripting"),
|
||||
],
|
||||
default="system_issues",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="message",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
18
api/tacticalrmm/logs/migrations/0014_auditlog_agent_id.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-06-28 02:37
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0013_auto_20210614_1835'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='auditlog',
|
||||
name='agent_id',
|
||||
field=models.PositiveIntegerField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 04:26
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0014_auditlog_agent_id'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='auditlog',
|
||||
name='object_type',
|
||||
field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk'), ('alert_template', 'Alert Template'), ('role', 'Role')], max_length=100),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 17:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0015_alter_auditlog_object_type'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='auditlog',
|
||||
name='object_type',
|
||||
field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk'), ('alerttemplate', 'Alert Template'), ('role', 'Role')], max_length=100),
|
||||
),
|
||||
]
|
||||
23
api/tacticalrmm/logs/migrations/0017_auto_20210731_1707.py
Normal file
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-31 17:07
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0016_alter_auditlog_object_type'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='pendingaction',
|
||||
name='cancelable',
|
||||
field=models.BooleanField(blank=True, default=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='pendingaction',
|
||||
name='action_type',
|
||||
field=models.CharField(blank=True, choices=[('schedreboot', 'Scheduled Reboot'), ('taskaction', 'Scheduled Task Action'), ('agentupdate', 'Agent Update'), ('chocoinstall', 'Chocolatey Software Install'), ('runcmd', 'Run Command'), ('runscript', 'Run Script'), ('runpatchscan', 'Run Patch Scan'), ('runpatchinstall', 'Run Patch Install')], max_length=255, null=True),
|
||||
),
|
||||
]
|
||||
@@ -2,14 +2,24 @@ import datetime as dt
|
||||
from abc import abstractmethod
|
||||
|
||||
from django.db import models
|
||||
|
||||
from tacticalrmm.middleware import get_debug_info, get_username
|
||||
|
||||
|
||||
def get_debug_level():
|
||||
from core.models import CoreSettings
|
||||
|
||||
return CoreSettings.objects.first().agent_debug_level # type: ignore
|
||||
|
||||
|
||||
ACTION_TYPE_CHOICES = [
|
||||
("schedreboot", "Scheduled Reboot"),
|
||||
("taskaction", "Scheduled Task Action"), # deprecated
|
||||
("agentupdate", "Agent Update"),
|
||||
("chocoinstall", "Chocolatey Software Install"),
|
||||
("runcmd", "Run Command"),
|
||||
("runscript", "Run Script"),
|
||||
("runpatchscan", "Run Patch Scan"),
|
||||
("runpatchinstall", "Run Patch Install"),
|
||||
]
|
||||
|
||||
AUDIT_ACTION_TYPE_CHOICES = [
|
||||
@@ -40,6 +50,8 @@ AUDIT_OBJECT_TYPE_CHOICES = [
|
||||
("automatedtask", "Automated Task"),
|
||||
("coresettings", "Core Settings"),
|
||||
("bulk", "Bulk"),
|
||||
("alerttemplate", "Alert Template"),
|
||||
("role", "Role"),
|
||||
]
|
||||
|
||||
STATUS_CHOICES = [
|
||||
@@ -51,6 +63,7 @@ STATUS_CHOICES = [
|
||||
class AuditLog(models.Model):
|
||||
username = models.CharField(max_length=100)
|
||||
agent = models.CharField(max_length=255, null=True, blank=True)
|
||||
agent_id = models.PositiveIntegerField(blank=True, null=True)
|
||||
entry_time = models.DateTimeField(auto_now_add=True)
|
||||
action = models.CharField(max_length=100, choices=AUDIT_ACTION_TYPE_CHOICES)
|
||||
object_type = models.CharField(max_length=100, choices=AUDIT_OBJECT_TYPE_CHOICES)
|
||||
@@ -73,24 +86,25 @@ class AuditLog(models.Model):
|
||||
return super(AuditLog, self).save(*args, **kwargs)
|
||||
|
||||
@staticmethod
|
||||
def audit_mesh_session(username, hostname, debug_info={}):
|
||||
def audit_mesh_session(username, agent, debug_info={}):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
agent=hostname,
|
||||
agent=agent.hostname,
|
||||
agent_id=agent.id,
|
||||
object_type="agent",
|
||||
action="remote_session",
|
||||
message=f"{username} used Mesh Central to initiate a remote session to {hostname}.",
|
||||
message=f"{username} used Mesh Central to initiate a remote session to {agent.hostname}.",
|
||||
debug_info=debug_info,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def audit_raw_command(username, hostname, cmd, shell, debug_info={}):
|
||||
def audit_raw_command(username, agent, cmd, shell, debug_info={}):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
agent=hostname,
|
||||
agent=agent.hostname,
|
||||
object_type="agent",
|
||||
action="execute_command",
|
||||
message=f"{username} issued {shell} command on {hostname}.",
|
||||
message=f"{username} issued {shell} command on {agent.hostname}.",
|
||||
after_value=cmd,
|
||||
debug_info=debug_info,
|
||||
)
|
||||
@@ -102,6 +116,7 @@ class AuditLog(models.Model):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
object_type=object_type,
|
||||
agent_id=before["id"] if object_type == "agent" else None,
|
||||
action="modify",
|
||||
message=f"{username} modified {object_type} {name}",
|
||||
before_value=before,
|
||||
@@ -114,6 +129,7 @@ class AuditLog(models.Model):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
object_type=object_type,
|
||||
agent=after["id"] if object_type == "agent" else None,
|
||||
action="add",
|
||||
message=f"{username} added {object_type} {name}",
|
||||
after_value=after,
|
||||
@@ -125,6 +141,7 @@ class AuditLog(models.Model):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
object_type=object_type,
|
||||
agent=before["id"] if object_type == "agent" else None,
|
||||
action="delete",
|
||||
message=f"{username} deleted {object_type} {name}",
|
||||
before_value=before,
|
||||
@@ -132,13 +149,14 @@ class AuditLog(models.Model):
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def audit_script_run(username, hostname, script, debug_info={}):
|
||||
def audit_script_run(username, agent, script, debug_info={}):
|
||||
AuditLog.objects.create(
|
||||
agent=hostname,
|
||||
agent=agent.hostname,
|
||||
agent_id=agent.id,
|
||||
username=username,
|
||||
object_type="agent",
|
||||
action="execute_script",
|
||||
message=f'{username} ran script: "{script}" on {hostname}',
|
||||
message=f'{username} ran script: "{script}" on {agent.hostname}',
|
||||
debug_info=debug_info,
|
||||
)
|
||||
|
||||
@@ -190,13 +208,13 @@ class AuditLog(models.Model):
|
||||
site = Site.objects.get(pk=affected["site"])
|
||||
target = f"on all agents within site: {site.client.name}\\{site.name}"
|
||||
elif affected["target"] == "agents":
|
||||
agents = Agent.objects.filter(pk__in=affected["agentPKs"]).values_list(
|
||||
agents = Agent.objects.filter(pk__in=affected["agents"]).values_list(
|
||||
"hostname", flat=True
|
||||
)
|
||||
target = "on multiple agents"
|
||||
|
||||
if action == "script":
|
||||
script = Script.objects.get(pk=affected["scriptPK"])
|
||||
script = Script.objects.get(pk=affected["script"])
|
||||
action = f"script: {script.name}"
|
||||
|
||||
if agents:
|
||||
@@ -212,8 +230,63 @@ class AuditLog(models.Model):
|
||||
)
|
||||
|
||||
|
||||
LOG_LEVEL_CHOICES = [
|
||||
("info", "Info"),
|
||||
("warning", "Warning"),
|
||||
("error", "Error"),
|
||||
("critical", "Critical"),
|
||||
]
|
||||
|
||||
LOG_TYPE_CHOICES = [
|
||||
("agent_update", "Agent Update"),
|
||||
("agent_issues", "Agent Issues"),
|
||||
("win_updates", "Windows Updates"),
|
||||
("system_issues", "System Issues"),
|
||||
("scripting", "Scripting"),
|
||||
]
|
||||
|
||||
|
||||
class DebugLog(models.Model):
|
||||
pass
|
||||
entry_time = models.DateTimeField(auto_now_add=True)
|
||||
agent = models.ForeignKey(
|
||||
"agents.Agent",
|
||||
related_name="debuglogs",
|
||||
on_delete=models.CASCADE,
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
log_level = models.CharField(
|
||||
max_length=50, choices=LOG_LEVEL_CHOICES, default="info"
|
||||
)
|
||||
log_type = models.CharField(
|
||||
max_length=50, choices=LOG_TYPE_CHOICES, default="system_issues"
|
||||
)
|
||||
message = models.TextField(null=True, blank=True)
|
||||
|
||||
@classmethod
|
||||
def info(
|
||||
cls,
|
||||
message,
|
||||
agent=None,
|
||||
log_type="system_issues",
|
||||
):
|
||||
if get_debug_level() in ["info"]:
|
||||
cls(log_level="info", agent=agent, log_type=log_type, message=message)
|
||||
|
||||
@classmethod
|
||||
def warning(cls, message, agent=None, log_type="system_issues"):
|
||||
if get_debug_level() in ["info", "warning"]:
|
||||
cls(log_level="warning", agent=agent, log_type=log_type, message=message)
|
||||
|
||||
@classmethod
|
||||
def error(cls, message, agent=None, log_type="system_issues"):
|
||||
if get_debug_level() in ["info", "warning", "error"]:
|
||||
cls(log_level="error", agent=agent, log_type=log_type, message=message)
|
||||
|
||||
@classmethod
|
||||
def critical(cls, message, agent=None, log_type="system_issues"):
|
||||
if get_debug_level() in ["info", "warning", "error", "critical"]:
|
||||
cls(log_level="critical", agent=agent, log_type=log_type, message=message)
|
||||
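A short usage sketch of the new DebugLog helpers, matching how they are called elsewhere in this diff (e.g. CoreSettings.send_mail); the agent variable is assumed to be an Agent instance or None.

from logs.models import DebugLog

# Only persisted when CoreSettings.agent_debug_level allows the level.
DebugLog.error(
    message="Sending email failed with error: timeout",
    agent=agent,              # optional; None for server-side issues
    log_type="agent_issues",  # one of LOG_TYPE_CHOICES
)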
|
||||
|
||||
class PendingAction(models.Model):
|
||||
@@ -232,6 +305,7 @@ class PendingAction(models.Model):
|
||||
choices=STATUS_CHOICES,
|
||||
default="pending",
|
||||
)
|
||||
cancelable = models.BooleanField(blank=True, default=False)
|
||||
celery_id = models.CharField(null=True, blank=True, max_length=255)
|
||||
details = models.JSONField(null=True, blank=True)
|
||||
|
||||
@@ -247,6 +321,8 @@ class PendingAction(models.Model):
|
||||
return "Next update cycle"
|
||||
elif self.action_type == "chocoinstall":
|
||||
return "ASAP"
|
||||
else:
|
||||
return "On next checkin"
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
@@ -259,6 +335,14 @@ class PendingAction(models.Model):
|
||||
elif self.action_type == "chocoinstall":
|
||||
return f"{self.details['name']} software install"
|
||||
|
||||
elif self.action_type in [
|
||||
"runcmd",
|
||||
"runscript",
|
||||
"runpatchscan",
|
||||
"runpatchinstall",
|
||||
]:
|
||||
return f"{self.action_type}"
|
||||
|
||||
|
||||
class BaseAuditModel(models.Model):
|
||||
# abstract base class for auditing models
|
||||
@@ -275,13 +359,14 @@ class BaseAuditModel(models.Model):
|
||||
def serialize():
|
||||
pass
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
def save(self, old_model=None, *args, **kwargs):
|
||||
|
||||
if get_username():
|
||||
|
||||
before_value = {}
|
||||
object_class = type(self)
|
||||
object_name = object_class.__name__.lower()
|
||||
username = get_username()
|
||||
after_value = object_class.serialize(self) # type: ignore
|
||||
|
||||
# populate created_by and modified_by fields on instance
|
||||
if not getattr(self, "created_by", None):
|
||||
@@ -289,32 +374,37 @@ class BaseAuditModel(models.Model):
|
||||
if hasattr(self, "modified_by"):
|
||||
self.modified_by = username
|
||||
|
||||
# capture object properties before edit
|
||||
if self.pk:
|
||||
before_value = object_class.objects.get(pk=self.id)
|
||||
|
||||
# don't create entry for agent add since that is done in view
|
||||
if not self.pk:
|
||||
AuditLog.audit_object_add(
|
||||
username,
|
||||
object_name,
|
||||
object_class.serialize(self),
|
||||
after_value, # type: ignore
|
||||
self.__str__(),
|
||||
debug_info=get_debug_info(),
|
||||
)
|
||||
else:
|
||||
AuditLog.audit_object_changed(
|
||||
username,
|
||||
object_class.__name__.lower(),
|
||||
object_class.serialize(before_value),
|
||||
object_class.serialize(self),
|
||||
self.__str__(),
|
||||
debug_info=get_debug_info(),
|
||||
)
|
||||
|
||||
return super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
if old_model:
|
||||
before_value = object_class.serialize(old_model) # type: ignore
|
||||
else:
|
||||
before_value = object_class.serialize(object_class.objects.get(pk=self.pk)) # type: ignore
|
||||
# only create an audit entry if the values have changed
|
||||
if before_value != after_value: # type: ignore
|
||||
|
||||
AuditLog.audit_object_changed(
|
||||
username,
|
||||
object_class.__name__.lower(),
|
||||
before_value,
|
||||
after_value, # type: ignore
|
||||
self.__str__(),
|
||||
debug_info=get_debug_info(),
|
||||
)
|
||||
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
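The new old_model keyword lets a caller hand in the pre-save snapshot it already fetched instead of forcing a second query here; the Site.save() override earlier in this diff is the pattern. A condensed sketch for a hypothetical BaseAuditModel subclass:

class Example(BaseAuditModel):  # hypothetical subclass
    def save(self, *args, **kwargs):
        # fetch the current row once and reuse it for the audit diff
        old = type(self).objects.get(pk=self.pk) if self.pk else None
        super().save(old_model=old, *args, **kwargs)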
|
||||
def delete(self, *args, **kwargs):
|
||||
super(BaseAuditModel, self).delete(*args, **kwargs)
|
||||
|
||||
if get_username():
|
||||
|
||||
@@ -322,9 +412,7 @@ class BaseAuditModel(models.Model):
|
||||
AuditLog.audit_object_delete(
|
||||
get_username(),
|
||||
object_class.__name__.lower(),
|
||||
object_class.serialize(self),
|
||||
object_class.serialize(self), # type: ignore
|
||||
self.__str__(),
|
||||
debug_info=get_debug_info(),
|
||||
)
|
||||
|
||||
return super(BaseAuditModel, self).delete(*args, **kwargs)
|
||||
|
||||
@@ -2,12 +2,12 @@ from rest_framework import serializers
|
||||
|
||||
from tacticalrmm.utils import get_default_timezone
|
||||
|
||||
from .models import AuditLog, PendingAction
|
||||
from .models import AuditLog, DebugLog, PendingAction
|
||||
|
||||
|
||||
class AuditLogSerializer(serializers.ModelSerializer):
|
||||
|
||||
entry_time = serializers.SerializerMethodField(read_only=True)
|
||||
ip_address = serializers.ReadOnlyField(source="debug_info.ip")
|
||||
|
||||
class Meta:
|
||||
model = AuditLog
|
||||
@@ -19,7 +19,6 @@ class AuditLogSerializer(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class PendingActionSerializer(serializers.ModelSerializer):
|
||||
|
||||
hostname = serializers.ReadOnlyField(source="agent.hostname")
|
||||
salt_id = serializers.ReadOnlyField(source="agent.salt_id")
|
||||
client = serializers.ReadOnlyField(source="agent.client.name")
|
||||
@@ -30,3 +29,16 @@ class PendingActionSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = PendingAction
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class DebugLogSerializer(serializers.ModelSerializer):
|
||||
agent = serializers.ReadOnlyField(source="agent.hostname")
|
||||
entry_time = serializers.SerializerMethodField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = DebugLog
|
||||
fields = "__all__"
|
||||
|
||||
def get_entry_time(self, log):
|
||||
timezone = get_default_timezone()
|
||||
return log.entry_time.astimezone(timezone).strftime("%m %d %Y %H:%M:%S")
|
||||
|
||||
25
api/tacticalrmm/logs/tasks.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
|
||||
@app.task
|
||||
def prune_debug_log(older_than_days: int) -> str:
|
||||
from .models import DebugLog
|
||||
|
||||
DebugLog.objects.filter(
|
||||
entry_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
|
||||
).delete()
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def prune_audit_log(older_than_days: int) -> str:
|
||||
from .models import AuditLog
|
||||
|
||||
AuditLog.objects.filter(
|
||||
entry_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
|
||||
).delete()
|
||||
|
||||
return "ok"
|
||||
@@ -1,10 +1,11 @@
|
||||
from datetime import datetime, timedelta
|
||||
from itertools import cycle
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker, seq
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from logs.models import PendingAction
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
|
||||
class TestAuditViews(TacticalTestCase):
|
||||
@@ -16,20 +17,23 @@ class TestAuditViews(TacticalTestCase):
|
||||
|
||||
# create clients for client filter
|
||||
site = baker.make("clients.Site")
|
||||
baker.make_recipe("agents.agent", site=site, hostname="AgentHostname1")
|
||||
agent1 = baker.make_recipe("agents.agent", site=site, hostname="AgentHostname1")
|
||||
agent2 = baker.make_recipe("agents.agent", hostname="AgentHostname2")
|
||||
agent0 = baker.make_recipe("agents.agent", hostname="AgentHostname")
|
||||
|
||||
# user jim agent logs
|
||||
baker.make_recipe(
|
||||
"logs.agent_logs",
|
||||
username="jim",
|
||||
agent="AgentHostname1",
|
||||
entry_time=seq(datetime.now(), timedelta(days=3)),
|
||||
agent_id=agent1.id,
|
||||
_quantity=15,
|
||||
)
|
||||
baker.make_recipe(
|
||||
"logs.agent_logs",
|
||||
username="jim",
|
||||
agent="AgentHostname2",
|
||||
entry_time=seq(datetime.now(), timedelta(days=100)),
|
||||
agent_id=agent2.id,
|
||||
_quantity=8,
|
||||
)
|
||||
|
||||
@@ -38,14 +42,14 @@ class TestAuditViews(TacticalTestCase):
|
||||
"logs.agent_logs",
|
||||
username="james",
|
||||
agent="AgentHostname1",
|
||||
entry_time=seq(datetime.now(), timedelta(days=55)),
|
||||
agent_id=agent1.id,
|
||||
_quantity=7,
|
||||
)
|
||||
baker.make_recipe(
|
||||
"logs.agent_logs",
|
||||
username="james",
|
||||
agent="AgentHostname2",
|
||||
entry_time=seq(datetime.now(), timedelta(days=20)),
|
||||
agent_id=agent2.id,
|
||||
_quantity=10,
|
||||
)
|
||||
|
||||
@@ -53,7 +57,7 @@ class TestAuditViews(TacticalTestCase):
|
||||
baker.make_recipe(
|
||||
"logs.agent_logs",
|
||||
agent=seq("AgentHostname"),
|
||||
entry_time=seq(datetime.now(), timedelta(days=29)),
|
||||
agent_id=seq(agent1.id),
|
||||
_quantity=5,
|
||||
)
|
||||
|
||||
@@ -61,7 +65,6 @@ class TestAuditViews(TacticalTestCase):
|
||||
baker.make_recipe(
|
||||
"logs.object_logs",
|
||||
username="james",
|
||||
entry_time=seq(datetime.now(), timedelta(days=5)),
|
||||
_quantity=17,
|
||||
)
|
||||
|
||||
@@ -69,7 +72,6 @@ class TestAuditViews(TacticalTestCase):
|
||||
baker.make_recipe(
|
||||
"logs.login_logs",
|
||||
username="james",
|
||||
entry_time=seq(datetime.now(), timedelta(days=7)),
|
||||
_quantity=11,
|
||||
)
|
||||
|
||||
@@ -77,51 +79,62 @@ class TestAuditViews(TacticalTestCase):
|
||||
baker.make_recipe(
|
||||
"logs.login_logs",
|
||||
username="jim",
|
||||
entry_time=seq(datetime.now(), timedelta(days=11)),
|
||||
_quantity=13,
|
||||
)
|
||||
|
||||
return site
|
||||
return {"site": site, "agents": [agent0, agent1, agent2]}
|
||||
|
||||
def test_get_audit_logs(self):
|
||||
url = "/logs/auditlogs/"
|
||||
|
||||
# create data
|
||||
site = self.create_audit_records()
|
||||
data = self.create_audit_records()
|
||||
|
||||
# test data and result counts
|
||||
data = [
|
||||
{"filter": {"timeFilter": 30}, "count": 86},
|
||||
{
|
||||
"filter": {"timeFilter": 45, "agentFilter": ["AgentHostname2"]},
|
||||
"filter": {
|
||||
"timeFilter": 45,
|
||||
"agentFilter": [data["agents"][2].id],
|
||||
},
|
||||
"count": 19,
|
||||
},
|
||||
{
|
||||
"filter": {"userFilter": ["jim"], "agentFilter": ["AgentHostname1"]},
|
||||
"filter": {
|
||||
"userFilter": ["jim"],
|
||||
"agentFilter": [data["agents"][1].id],
|
||||
},
|
||||
"count": 15,
|
||||
},
|
||||
{
|
||||
"filter": {
|
||||
"timeFilter": 180,
|
||||
"userFilter": ["james"],
|
||||
"agentFilter": ["AgentHostname1"],
|
||||
"agentFilter": [data["agents"][1].id],
|
||||
},
|
||||
"count": 7,
|
||||
},
|
||||
{"filter": {}, "count": 86},
|
||||
{"filter": {"agentFilter": ["DoesntExist"]}, "count": 0},
|
||||
{"filter": {"agentFilter": [500]}, "count": 0},
|
||||
{
|
||||
"filter": {
|
||||
"timeFilter": 35,
|
||||
"userFilter": ["james", "jim"],
|
||||
"agentFilter": ["AgentHostname1", "AgentHostname2"],
|
||||
"agentFilter": [
|
||||
data["agents"][1].id,
|
||||
data["agents"][2].id,
|
||||
],
|
||||
},
|
||||
"count": 40,
|
||||
},
|
||||
{"filter": {"timeFilter": 35, "userFilter": ["james", "jim"]}, "count": 81},
|
||||
{"filter": {"objectFilter": ["user"]}, "count": 26},
|
||||
{"filter": {"actionFilter": ["login"]}, "count": 12},
|
||||
{"filter": {"clientFilter": [site.client.id]}, "count": 23},
|
||||
{
|
||||
"filter": {"clientFilter": [data["site"].client.id]},
|
||||
"count": 23,
|
||||
},
|
||||
]
|
||||
|
||||
pagination = {
|
||||
@@ -137,45 +150,15 @@ class TestAuditViews(TacticalTestCase):
|
||||
)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(
|
||||
len(resp.data["audit_logs"]),
|
||||
len(resp.data["audit_logs"]), # type:ignore
|
||||
pagination["rowsPerPage"]
|
||||
if req["count"] > pagination["rowsPerPage"]
|
||||
else req["count"],
|
||||
)
|
||||
self.assertEqual(resp.data["total"], req["count"])
|
||||
self.assertEqual(resp.data["total"], req["count"]) # type:ignore
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_options_filter(self):
|
||||
url = "/logs/auditlogs/optionsfilter/"
|
||||
|
||||
baker.make_recipe("agents.agent", hostname=seq("AgentHostname"), _quantity=5)
|
||||
baker.make_recipe("agents.agent", hostname=seq("Server"), _quantity=3)
|
||||
baker.make("accounts.User", username=seq("Username"), _quantity=7)
|
||||
baker.make("accounts.User", username=seq("soemthing"), _quantity=3)
|
||||
|
||||
data = [
|
||||
{"req": {"type": "agent", "pattern": "AgeNt"}, "count": 5},
|
||||
{"req": {"type": "agent", "pattern": "AgentHostname1"}, "count": 1},
|
||||
{"req": {"type": "agent", "pattern": "hasjhd"}, "count": 0},
|
||||
{"req": {"type": "user", "pattern": "UsEr"}, "count": 7},
|
||||
{"req": {"type": "user", "pattern": "UserName1"}, "count": 1},
|
||||
{"req": {"type": "user", "pattern": "dfdsadf"}, "count": 0},
|
||||
]
|
||||
|
||||
for req in data:
|
||||
resp = self.client.post(url, req["req"], format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), req["count"])
|
||||
|
||||
# test for invalid payload. needs to have either type: user or agent
|
||||
invalid_data = {"type": "object", "pattern": "SomeString"}
|
||||
|
||||
resp = self.client.post(url, invalid_data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_get_pending_actions(self):
|
||||
url = "/logs/pendingactions/"
|
||||
agent1 = baker.make_recipe("agents.online_agent")
|
||||
@@ -270,3 +253,87 @@ class TestAuditViews(TacticalTestCase):
|
||||
self.assertEqual(r.data, "error deleting sched task") # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_get_debug_log(self):
|
||||
url = "/logs/debuglog/"
|
||||
|
||||
# create data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
baker.make(
|
||||
"logs.DebugLog",
|
||||
log_level=cycle(["error", "info", "warning", "critical"]),
|
||||
log_type="agent_issues",
|
||||
agent=agent,
|
||||
_quantity=4,
|
||||
)
|
||||
|
||||
logs = baker.make(
|
||||
"logs.DebugLog",
|
||||
log_type="system_issues",
|
||||
log_level=cycle(["error", "info", "warning", "critical"]),
|
||||
_quantity=15,
|
||||
)
|
||||
|
||||
# test agent filter
|
||||
data = {"agentFilter": agent.id}
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 4) # type: ignore
|
||||
|
||||
# test log type filter and agent
|
||||
data = {"agentFilter": agent.id, "logLevelFilter": "warning"}
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 1) # type: ignore
|
||||
|
||||
# test log type filter combined with log level filter
|
||||
data = {"logTypeFilter": "system_issues", "logLevelFilter": "error"}
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 4) # type: ignore
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
|
||||
class TestLogTasks(TacticalTestCase):
|
||||
def test_prune_debug_log(self):
|
||||
from .models import DebugLog
|
||||
from .tasks import prune_debug_log
|
||||
|
||||
# setup data
|
||||
debug_log = baker.make(
|
||||
"logs.DebugLog",
|
||||
_quantity=50,
|
||||
)
|
||||
|
||||
days = 0
|
||||
for item in debug_log: # type:ignore
|
||||
item.entry_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
item.save()
|
||||
days = days + 5
|
||||
|
||||
# delete DebugLog entries older than 30 days
|
||||
prune_debug_log(30)
|
||||
|
||||
self.assertEqual(DebugLog.objects.count(), 6)
|
||||
|
||||
def test_prune_audit_log(self):
|
||||
from .models import AuditLog
|
||||
from .tasks import prune_audit_log
|
||||
|
||||
# setup data
|
||||
audit_log = baker.make(
|
||||
"logs.AuditLog",
|
||||
_quantity=50,
|
||||
)
|
||||
|
||||
days = 0
|
||||
for item in audit_log: # type:ignore
|
||||
item.entry_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
item.save()
|
||||
days = days + 5
|
||||
|
||||
# delete AuditLog entries older than 30 days
|
||||
prune_audit_log(30)
|
||||
|
||||
self.assertEqual(AuditLog.objects.count(), 6)
|
||||
|
||||
@@ -5,7 +5,5 @@ from . import views
|
||||
urlpatterns = [
|
||||
path("pendingactions/", views.PendingActions.as_view()),
|
||||
path("auditlogs/", views.GetAuditLogs.as_view()),
|
||||
path("auditlogs/optionsfilter/", views.FilterOptionsAuditLog.as_view()),
|
||||
path("debuglog/<mode>/<hostname>/<order>/", views.debug_log),
|
||||
path("downloadlog/", views.download_log),
|
||||
path("debuglog/", views.GetDebugLog.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,28 +1,23 @@
|
||||
import asyncio
|
||||
import subprocess
|
||||
from datetime import datetime as dt
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.paginator import Paginator
|
||||
from django.db.models import Q
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.models import User
|
||||
from accounts.serializers import UserSerializer
|
||||
from agents.models import Agent
|
||||
from agents.serializers import AgentHostnameSerializer
|
||||
from django.core.paginator import Paginator
|
||||
from django.db.models import Q
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework import status
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import AuditLog, PendingAction
|
||||
from .models import AuditLog, PendingAction, DebugLog
|
||||
from .permissions import AuditLogPerms, DebugLogPerms, ManagePendingActionPerms
|
||||
from .serializers import AuditLogSerializer, PendingActionSerializer
|
||||
from .serializers import AuditLogSerializer, DebugLogSerializer, PendingActionSerializer
|
||||
|
||||
|
||||
class GetAuditLogs(APIView):
|
||||
@@ -48,7 +43,7 @@ class GetAuditLogs(APIView):
|
||||
timeFilter = Q()
|
||||
|
||||
if "agentFilter" in request.data:
|
||||
agentFilter = Q(agent__in=request.data["agentFilter"])
|
||||
agentFilter = Q(agent_id__in=request.data["agentFilter"])
|
||||
|
||||
elif "clientFilter" in request.data:
|
||||
clients = Client.objects.filter(
|
||||
@@ -95,23 +90,6 @@ class GetAuditLogs(APIView):
|
||||
)
|
||||
|
||||
|
||||
class FilterOptionsAuditLog(APIView):
|
||||
permission_classes = [IsAuthenticated, AuditLogPerms]
|
||||
|
||||
def post(self, request):
|
||||
if request.data["type"] == "agent":
|
||||
agents = Agent.objects.filter(hostname__icontains=request.data["pattern"])
|
||||
return Response(AgentHostnameSerializer(agents, many=True).data)
|
||||
|
||||
if request.data["type"] == "user":
|
||||
users = User.objects.filter(
|
||||
username__icontains=request.data["pattern"], agent=None
|
||||
)
|
||||
return Response(UserSerializer(users, many=True).data)
|
||||
|
||||
return Response("error", status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
class PendingActions(APIView):
|
||||
permission_classes = [IsAuthenticated, ManagePendingActionPerms]
|
||||
|
||||
@@ -156,60 +134,28 @@ class PendingActions(APIView):
|
||||
return Response(f"{action.agent.hostname}: {action.description} was cancelled")
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([IsAuthenticated, DebugLogPerms])
|
||||
def debug_log(request, mode, hostname, order):
|
||||
log_file = settings.LOG_CONFIG["handlers"][0]["sink"]
|
||||
class GetDebugLog(APIView):
|
||||
permission_classes = [IsAuthenticated, DebugLogPerms]
|
||||
|
||||
agents = Agent.objects.prefetch_related("site").only("pk", "hostname")
|
||||
agent_hostnames = AgentHostnameSerializer(agents, many=True)
|
||||
def patch(self, request):
|
||||
|
||||
switch_mode = {
|
||||
"info": "INFO",
|
||||
"critical": "CRITICAL",
|
||||
"error": "ERROR",
|
||||
"warning": "WARNING",
|
||||
}
|
||||
level = switch_mode.get(mode, "INFO")
|
||||
agentFilter = Q()
|
||||
logTypeFilter = Q()
|
||||
logLevelFilter = Q()
|
||||
|
||||
if hostname == "all" and order == "latest":
|
||||
cmd = f"grep -h {level} {log_file} | tac"
|
||||
elif hostname == "all" and order == "oldest":
|
||||
cmd = f"grep -h {level} {log_file}"
|
||||
elif hostname != "all" and order == "latest":
|
||||
cmd = f"grep {hostname} {log_file} | grep -h {level} | tac"
|
||||
elif hostname != "all" and order == "oldest":
|
||||
cmd = f"grep {hostname} {log_file} | grep -h {level}"
|
||||
else:
|
||||
return Response("error", status=status.HTTP_400_BAD_REQUEST)
|
||||
if "logTypeFilter" in request.data:
|
||||
logTypeFilter = Q(log_type=request.data["logTypeFilter"])
|
||||
|
||||
contents = subprocess.run(
|
||||
cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
universal_newlines=True,
|
||||
shell=True,
|
||||
)
|
||||
if "logLevelFilter" in request.data:
|
||||
logLevelFilter = Q(log_level=request.data["logLevelFilter"])
|
||||
|
||||
if not contents.stdout:
|
||||
resp = f"No {mode} logs"
|
||||
else:
|
||||
resp = contents.stdout
|
||||
if "agentFilter" in request.data:
|
||||
agentFilter = Q(agent=request.data["agentFilter"])
|
||||
|
||||
return Response({"log": resp, "agents": agent_hostnames.data})
|
||||
debug_logs = (
|
||||
DebugLog.objects.filter(logLevelFilter)
|
||||
.filter(agentFilter)
|
||||
.filter(logTypeFilter)
|
||||
)
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([IsAuthenticated, DebugLogPerms])
|
||||
def download_log(request):
|
||||
log_file = settings.LOG_CONFIG["handlers"][0]["sink"]
|
||||
if settings.DEBUG:
|
||||
with open(log_file, "rb") as f:
|
||||
response = HttpResponse(f.read(), content_type="text/plain")
|
||||
response["Content-Disposition"] = "attachment; filename=debug.log"
|
||||
return response
|
||||
else:
|
||||
response = HttpResponse()
|
||||
response["Content-Disposition"] = "attachment; filename=debug.log"
|
||||
response["X-Accel-Redirect"] = "/private/log/debug.log"
|
||||
return response
|
||||
return Response(DebugLogSerializer(debug_logs, many=True).data)
|
||||
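The filters above are driven entirely by the PATCH body. An illustrative request mirroring the payloads used in the tests earlier in this diff; the endpoint path comes from logs/urls.py earlier in this diff.

# e.g. from a DRF test client or any authenticated API client
resp = client.patch(
    "/logs/debuglog/",
    {"agentFilter": agent.id, "logLevelFilter": "error", "logTypeFilter": "agent_issues"},
    format="json",
)
# resp.data is a list of serialized DebugLog rows (agent rendered as its hostname)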
|
||||
@@ -6,4 +6,6 @@ mkdocs-material
|
||||
pymdown-extensions
|
||||
Pygments
|
||||
isort
|
||||
mypy
|
||||
mypy
|
||||
types-pytz
|
||||
types-pytz
|
||||
@@ -1,22 +1,23 @@
|
||||
asgiref==3.3.4
|
||||
asgiref==3.4.1
|
||||
asyncio-nats-client==0.11.4
|
||||
celery==5.1.0
|
||||
celery==5.1.2
|
||||
certifi==2021.5.30
|
||||
cffi==1.14.5
|
||||
channels==3.0.3
|
||||
channels_redis==3.2.0
|
||||
cffi==1.14.6
|
||||
channels==3.0.4
|
||||
channels_redis==3.3.0
|
||||
chardet==4.0.0
|
||||
cryptography==3.4.7
|
||||
cryptography==3.4.8
|
||||
daphne==3.0.2
|
||||
Django==3.2.4
|
||||
django-cors-headers==3.7.0
|
||||
Django==3.2.6
|
||||
django-cors-headers==3.8.0
|
||||
django-ipware==3.0.2
|
||||
django-rest-knox==4.1.0
|
||||
djangorestframework==3.12.4
|
||||
future==0.18.2
|
||||
loguru==0.5.3
|
||||
msgpack==1.0.2
|
||||
packaging==20.9
|
||||
psycopg2-binary==2.8.6
|
||||
packaging==21.0
|
||||
psycopg2-binary==2.9.1
|
||||
pycparser==2.20
|
||||
pycryptodome==3.10.1
|
||||
pyotp==2.6.0
|
||||
@@ -24,13 +25,13 @@ pyparsing==2.4.7
|
||||
pytz==2021.1
|
||||
qrcode==6.1
|
||||
redis==3.5.3
|
||||
requests==2.25.1
|
||||
requests==2.26.0
|
||||
six==1.16.0
|
||||
sqlparse==0.4.1
|
||||
twilio==6.59.1
|
||||
urllib3==1.26.5
|
||||
twilio==6.63.1
|
||||
urllib3==1.26.6
|
||||
uWSGI==2.0.19.1
|
||||
validators==0.18.2
|
||||
vine==5.0.0
|
||||
websockets==8.1
|
||||
zipp==3.4.1
|
||||
websockets==9.1
|
||||
zipp==3.5.0
|
||||
|
||||
@@ -8,7 +8,6 @@
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Browsers",
|
||||
"default_timeout": "300"
|
||||
|
||||
},
|
||||
{
|
||||
"guid": "3ff6a386-11d1-4f9d-8cca-1b0563bb6443",
|
||||
@@ -38,7 +37,7 @@
|
||||
"description": "This script installs Duplicati 2.0.5.1 as a service.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software",
|
||||
"default_timeout": "300"
|
||||
"default_timeout": "300"
|
||||
},
|
||||
{
|
||||
"guid": "81cc5bcb-01bf-4b0c-89b9-0ac0f3fe0c04",
|
||||
@@ -48,7 +47,7 @@
|
||||
"description": "This script will reset all of the Windows Updates components to DEFAULT SETTINGS.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates",
|
||||
"default_timeout": "300"
|
||||
"default_timeout": "300"
|
||||
},
|
||||
{
|
||||
"guid": "8db87ff0-a9b4-4d9d-bc55-377bbcb85b6d",
|
||||
@@ -58,7 +57,7 @@
|
||||
"description": "Cleans the C: drive's Window Temperary files, Windows SoftwareDistribution folder, the local users Temperary folder, IIS logs (if applicable) and empties the recycling bin. All deleted files will go into a log transcript in $env:TEMP. By default this script leaves files that are newer than 7 days old however this variable can be edited.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Maintenance",
|
||||
"default_timeout": "25000"
|
||||
"default_timeout": "25000"
|
||||
},
|
||||
{
|
||||
"guid": "2f28e8c1-ae0f-4b46-a826-f513974526a3",
|
||||
@@ -176,11 +175,29 @@
|
||||
"name": "Screenconnect - Get GUID for client",
|
||||
"description": "Returns Screenconnect GUID for client - Use with Custom Fields for later use. ",
|
||||
"args": [
|
||||
"-serviceName {{client.ScreenConnectService}}"
|
||||
"{{client.ScreenConnectService}}"
|
||||
],
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Collectors"
|
||||
},
|
||||
{
|
||||
"guid": "9cfdfe8f-82bf-4081-a59f-576d694f4649",
|
||||
"filename": "Win_Teamviewer_Get_ID.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "TeamViewer - Get ClientID for client",
|
||||
"description": "Returns Teamviwer ClientID for client - Use with Custom Fields for later use. ",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Collectors"
|
||||
},
|
||||
{
|
||||
"guid": "e43081d4-6f71-4ce3-881a-22da749f7a57",
|
||||
"filename": "Win_AnyDesk_Get_Anynet_ID.ps1",
|
||||
"submittedBy": "https://github.com/meuchels",
|
||||
"name": "AnyDesk - Get AnyNetID for client",
|
||||
"description": "Returns AnyNetID for client - Use with Custom Fields for later use. ",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Collectors"
|
||||
},
|
||||
{
|
||||
"guid": "95a2ee6f-b89b-4551-856e-3081b041caa7",
|
||||
"filename": "Win_Power_Profile_Reset_High_Performance_to_Defaults.ps1",
|
||||
@@ -227,6 +244,30 @@
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"guid": "907652a5-9ec1-4759-9871-a7743f805ff2",
|
||||
"filename": "Win_Software_Uninstall.ps1",
|
||||
"submittedBy": "https://github.com/subzdev",
|
||||
"name": "Software Uninstaller - list, find, and uninstall most software",
|
||||
"description": "Allows listing, finding and uninstalling most software on Windows. There will be a best effort to uninstall silently if the silent uninstall string is not provided.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software",
|
||||
"default_timeout": "600"
|
||||
},
|
||||
{
|
||||
"guid": "64c3b1a8-c85f-4800-85a3-485f78a2d9ad",
|
||||
"filename": "Win_Bitdefender_GravityZone_Install.ps1",
|
||||
"submittedBy": "https://github.com/jhtechIL/",
|
||||
"name": "BitDefender Gravity Zone Install",
|
||||
"description": "Installs BitDefender Gravity Zone, requires client custom field setup. See script comments for details",
|
||||
"args": [
|
||||
"-url {{client.bdurl}}",
|
||||
"-exe {{client.bdexe}}"
|
||||
],
|
||||
"default_timeout": "2500",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"guid": "da51111c-aff6-4d87-9d76-0608e1f67fe5",
|
||||
"filename": "Win_Defender_Enable.ps1",
|
||||
@@ -255,6 +296,16 @@
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Windows Features"
|
||||
},
|
||||
{
|
||||
"guid": "0afd8d00-b95b-4318-8d07-0b9bc4424287",
|
||||
"filename": "Win_Feature_NET35_Enable.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Windows Feature - Enable .NET 3.5",
|
||||
"description": "Enables the Windows .NET 3.5 Framework in Turn Features on and off",
|
||||
"shell": "powershell",
|
||||
"default_timeout": "300",
|
||||
"category": "TRMM (Win):Windows Features"
|
||||
},
|
||||
{
|
||||
"guid": "24f19ead-fdfe-46b4-9dcb-4cd0e12a3940",
|
||||
"filename": "Win_Speedtest.ps1",
|
||||
@@ -273,6 +324,20 @@
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Active Directory"
|
||||
},
|
||||
{
|
||||
"guid": "5320dfc8-022a-41e7-9e39-11c493545ec9",
|
||||
"filename": "Win_AD_Hudu_ADDS_Documentation.ps1",
|
||||
"submittedBy": "https://github.com/unplugged216",
|
||||
"name": "ADDS - Directory documentation in Hudu",
|
||||
"description": "Auto generates ADDS documentation and submits it to your Hudu instance.",
|
||||
"args": [
|
||||
"-ClientName {{client.name}}",
|
||||
"-HuduBaseDomain {{global.HuduBaseDomain}}",
|
||||
"-HuduApiKey {{global.HuduApiKey}}"
|
||||
],
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Active Directory"
|
||||
},
|
||||
{
|
||||
"guid": "b6b9912f-4274-4162-99cc-9fd47fbcb292",
|
||||
"filename": "Win_ADDC_Sync_Start.bat",
|
||||
@@ -355,14 +420,14 @@
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "5615aa90-0272-427b-8acf-0ca019612501",
|
||||
"filename": "Win_Chocolatey_Update_Installed.bat",
|
||||
"guid": "6c78eb04-57ae-43b0-98ed-cbd3ef9e2f80",
|
||||
"filename": "Win_Chocolatey_Manage_Apps_Bulk.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Update Installed Apps",
|
||||
"description": "Update all apps that were installed using Chocolatey.",
|
||||
"shell": "cmd",
|
||||
"name": "Chocolatey - Install, Uninstall and Upgrade Software",
|
||||
"description": "This script installs, uninstalls and updates software using Chocolatey with logic to slow tasks to minimize hitting community limits. Mode install/uninstall/upgrade Hosts x",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software>Chocolatey",
|
||||
"default_timeout": "3600"
|
||||
"default_timeout": "600"
|
||||
},
|
||||
{
|
||||
"guid": "fff8024d-d72e-4457-84fa-6c780f69a16f",
|
||||
@@ -437,6 +502,16 @@
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"guid": "93038ae0-58ce-433e-a3b9-bc99ad1ea79a",
|
||||
"filename": "Win_Services_AutomaticStartup_Running.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Ensure all services with startup type Automatic are running",
|
||||
"description": "Gets a list of all service with startup type of Automatic but aren't running and tries to start them",
|
||||
"shell": "powershell",
|
||||
"default_timeout": "300",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"guid": "e09895d5-ca13-44a2-a38c-6e77c740f0e8",
|
||||
"filename": "Win_ScreenConnectAIO.ps1",
|
||||
@@ -494,6 +569,16 @@
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "7c0c7e37-60ff-462f-9c34-b5cd4c4796a7",
|
||||
"filename": "Win_Wifi_SSID_and_Password_Retrieval.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Network Wireless - Retrieve Saved passwords",
|
||||
"description": "Returns all saved wifi passwords stored on the computer",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "abe78170-7cf9-435b-9666-c5ef6c11a106",
|
||||
"filename": "Win_Network_IPv6_Disable.ps1",
|
||||
@@ -514,6 +599,16 @@
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "5676acca-44e5-46c8-af61-ae795ecb3ef1",
|
||||
"filename": "Win_Network_IP_DHCP_Renew.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Network - Release and Renew IP",
|
||||
"description": "Trigger and release and renew of IP address on all network adapters",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "83aa4d51-63ce-41e7-829f-3c16e6115bbf",
|
||||
"filename": "Win_Network_DNS_Set_to_1.1.1.2.ps1",
|
||||
@@ -544,6 +639,16 @@
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "43e65e5f-717a-4b6d-a724-1a86229fcd42",
|
||||
"filename": "Win_Activation_Check.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Windows Activation check",
|
||||
"description": "Checks to see if windows is activated and returns status",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "120"
|
||||
},
|
||||
{
|
||||
"guid": "83f6c6ea-6120-4fd3-bec8-d3abc505dcdf",
|
||||
"filename": "Win_TRMM_Start_Menu_Delete_Shortcut.ps1",
|
||||
@@ -626,9 +731,18 @@
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "6a52f495-d43e-40f4-91a9-bbe4f578e6d1",
|
||||
"filename": "Win_User_Create.ps1",
|
||||
"submittedBy": "https://github.com/brodur",
|
||||
"name": "Create Local User",
|
||||
"description": "Create a local user. Parameters are: username, password and optional: description, fullname, group (adds to Users if not specified)",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "57997ec7-b293-4fd5-9f90-a25426d0eb90",
|
||||
"filename": "Win_Get_Computer_Users.ps1",
|
||||
"filename": "Win_Users_List.ps1",
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "Get Computer Users",
|
||||
"description": "Get list of computer users and show which one is enabled",
|
||||
@@ -682,5 +796,25 @@
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Misc>Reference",
|
||||
"default_timeout": "1"
|
||||
},
|
||||
{
|
||||
"guid": "453c6d22-84b7-4767-8b5f-b825f233cf55",
|
||||
"filename": "Win_AD_Join_Computer.ps1",
|
||||
"submittedBy": "https://github.com/rfost52",
|
||||
"name": "AD - Join Computer to Domain",
|
||||
"description": "Join computer to a domain in Active Directory",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Active Directory",
|
||||
"default_timeout": "300"
|
||||
},
|
||||
{
|
||||
"guid": "962d3cce-49a2-4f3e-a790-36f62a6799a0",
|
||||
"filename": "Win_Collect_System_Report_And_Email.ps1",
|
||||
"submittedBy": "https://github.com/rfost52",
|
||||
"name": "Collect System Report and Email",
|
||||
"description": "Generates a system report in HTML format, then emails it",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "300"
|
||||
}
|
||||
]
|
||||
]
|
||||
22
api/tacticalrmm/scripts/migrations/0009_scriptsnippet.py
Normal file
@@ -0,0 +1,22 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 19:25
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('scripts', '0008_script_guid'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='ScriptSnippet',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(max_length=40)),
|
||||
('code', models.TextField()),
|
||||
('shell', models.CharField(choices=[('powershell', 'Powershell'), ('cmd', 'Batch (CMD)'), ('python', 'Python')], max_length=15)),
|
||||
],
|
||||
),
|
||||
]
|
||||
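For context (not part of the patch), a minimal ORM sketch of the table this 0009 migration creates; it assumes the migration has been applied and a Django shell is open in the api/tacticalrmm project:

```python
# Hypothetical usage of the new ScriptSnippet model created by 0009_scriptsnippet.
# Field names and choices mirror the migration above.
from scripts.models import ScriptSnippet

snippet = ScriptSnippet.objects.create(
    name="log_header",               # CharField(max_length=40)
    code="Write-Output 'starting'",  # TextField
    shell="powershell",              # one of: powershell, cmd, python
)
print(ScriptSnippet.objects.filter(name="log_header").exists())  # True
```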
@@ -0,0 +1,33 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-26 16:34
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('scripts', '0009_scriptsnippet'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='scriptsnippet',
|
||||
name='desc',
|
||||
field=models.CharField(blank=True, max_length=50, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='script',
|
||||
name='code_base64',
|
||||
field=models.TextField(blank=True, default='', null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='script',
|
||||
name='description',
|
||||
field=models.TextField(blank=True, default='', null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='scriptsnippet',
|
||||
name='name',
|
||||
field=models.CharField(max_length=40, unique=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-31 17:07
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('scripts', '0010_auto_20210726_1634'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='scriptsnippet',
|
||||
name='code',
|
||||
field=models.TextField(default=''),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='scriptsnippet',
|
||||
name='desc',
|
||||
field=models.CharField(blank=True, default='', max_length=50),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='scriptsnippet',
|
||||
name='shell',
|
||||
field=models.CharField(choices=[('powershell', 'Powershell'), ('cmd', 'Batch (CMD)'), ('python', 'Python')], default='powershell', max_length=15),
|
||||
),
|
||||
]
|
||||
@@ -1,12 +1,10 @@
|
||||
import base64
|
||||
import re
|
||||
from typing import List, Optional
|
||||
from typing import List
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from loguru import logger
|
||||
|
||||
from django.db.models.fields import CharField, TextField
|
||||
from logs.models import BaseAuditModel
|
||||
from tacticalrmm.utils import replace_db_values
|
||||
|
||||
@@ -21,13 +19,11 @@ SCRIPT_TYPES = [
|
||||
("builtin", "Built In"),
|
||||
]
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class Script(BaseAuditModel):
|
||||
guid = name = models.CharField(max_length=64, null=True, blank=True)
|
||||
guid = models.CharField(max_length=64, null=True, blank=True)
|
||||
name = models.CharField(max_length=255)
|
||||
description = models.TextField(null=True, blank=True)
|
||||
description = models.TextField(null=True, blank=True, default="")
|
||||
filename = models.CharField(max_length=255) # deprecated
|
||||
shell = models.CharField(
|
||||
max_length=100, choices=SCRIPT_SHELLS, default="powershell"
|
||||
@@ -43,20 +39,44 @@ class Script(BaseAuditModel):
|
||||
)
|
||||
favorite = models.BooleanField(default=False)
|
||||
category = models.CharField(max_length=100, null=True, blank=True)
|
||||
code_base64 = models.TextField(null=True, blank=True)
|
||||
code_base64 = models.TextField(null=True, blank=True, default="")
|
||||
default_timeout = models.PositiveIntegerField(default=90)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def code(self):
|
||||
def code_no_snippets(self):
|
||||
if self.code_base64:
|
||||
base64_bytes = self.code_base64.encode("ascii", "ignore")
|
||||
return base64.b64decode(base64_bytes).decode("ascii", "ignore")
|
||||
return base64.b64decode(self.code_base64.encode("ascii", "ignore")).decode(
|
||||
"ascii", "ignore"
|
||||
)
|
||||
else:
|
||||
return ""
|
||||
|
||||
@property
|
||||
def code(self):
|
||||
return self.replace_with_snippets(self.code_no_snippets)
|
||||
|
||||
@classmethod
|
||||
def replace_with_snippets(cls, code):
|
||||
# check if snippet has been added to script body
|
||||
matches = re.finditer(r"{{(.*)}}", code)
|
||||
if matches:
|
||||
replaced_code = code
|
||||
for snippet in matches:
|
||||
snippet_name = snippet.group(1).strip()
|
||||
if ScriptSnippet.objects.filter(name=snippet_name).exists():
|
||||
value = ScriptSnippet.objects.get(name=snippet_name).code
|
||||
else:
|
||||
value = ""
|
||||
|
||||
replaced_code = re.sub(snippet.group(), value, replaced_code)
|
||||
|
||||
return replaced_code
|
||||
else:
|
||||
return code
|
||||
|
||||
@classmethod
|
||||
def load_community_scripts(cls):
|
||||
import json
|
||||
@@ -97,20 +117,20 @@ class Script(BaseAuditModel):
|
||||
|
||||
if s.exists():
|
||||
i = s.first()
|
||||
i.name = script["name"]
|
||||
i.description = script["description"]
|
||||
i.category = category
|
||||
i.shell = script["shell"]
|
||||
i.default_timeout = default_timeout
|
||||
i.args = args
|
||||
i.name = script["name"] # type: ignore
|
||||
i.description = script["description"] # type: ignore
|
||||
i.category = category # type: ignore
|
||||
i.shell = script["shell"] # type: ignore
|
||||
i.default_timeout = default_timeout # type: ignore
|
||||
i.args = args # type: ignore
|
||||
|
||||
with open(os.path.join(scripts_dir, script["filename"]), "rb") as f:
|
||||
script_bytes = (
|
||||
f.read().decode("utf-8").encode("ascii", "ignore")
|
||||
)
|
||||
i.code_base64 = base64.b64encode(script_bytes).decode("ascii")
|
||||
i.code_base64 = base64.b64encode(script_bytes).decode("ascii") # type: ignore
|
||||
|
||||
i.save(
|
||||
i.save( # type: ignore
|
||||
update_fields=[
|
||||
"name",
|
||||
"description",
|
||||
@@ -175,7 +195,6 @@ class Script(BaseAuditModel):
|
||||
guid=script["guid"],
|
||||
name=script["name"],
|
||||
description=script["description"],
|
||||
filename=script["filename"],
|
||||
shell=script["shell"],
|
||||
script_type="builtin",
|
||||
category=category,
|
||||
@@ -209,7 +228,7 @@ class Script(BaseAuditModel):
|
||||
if match:
|
||||
# only get the match between the () in regex
|
||||
string = match.group(1)
|
||||
value = replace_db_values(string=string, agent=agent, shell=shell)
|
||||
value = replace_db_values(string=string, instance=agent, shell=shell)
|
||||
|
||||
if value:
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg))
|
||||
@@ -221,3 +240,13 @@ class Script(BaseAuditModel):
|
||||
temp_args.append(arg)
|
||||
|
||||
return temp_args
|
||||
|
||||
|
||||
class ScriptSnippet(models.Model):
|
||||
name = CharField(max_length=40, unique=True)
|
||||
desc = CharField(max_length=50, blank=True, default="")
|
||||
code = TextField(default="")
|
||||
shell = CharField(max_length=15, choices=SCRIPT_SHELLS, default="powershell")
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
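As an aside, a hedged sketch of how the snippet expansion added above behaves; the snippet name and script body are made up, and the calls assume the models shown in this diff:

```python
# Illustrative only: {{snippet_name}} tokens in a script body are expanded by
# Script.replace_with_snippets(); unknown names are replaced with an empty string.
from scripts.models import Script, ScriptSnippet

ScriptSnippet.objects.create(
    name="log_header", code="Write-Output 'starting run'", shell="powershell"
)

body = "{{log_header}}\nGet-Service | Where-Object Status -eq 'Stopped'"
print(Script.replace_with_snippets(body))
# Write-Output 'starting run'
# Get-Service | Where-Object Status -eq 'Stopped'
```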
@@ -1,6 +1,6 @@
|
||||
from rest_framework.serializers import ModelSerializer, ReadOnlyField
|
||||
|
||||
from .models import Script
|
||||
from .models import Script, ScriptSnippet
|
||||
|
||||
|
||||
class ScriptTableSerializer(ModelSerializer):
|
||||
@@ -41,3 +41,9 @@ class ScriptCheckSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Script
|
||||
fields = ["code", "shell"]
|
||||
|
||||
|
||||
class ScriptSnippetSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = ScriptSnippet
|
||||
fields = "__all__"
|
||||
|
||||
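A small sketch (assumed DRF usage, not from the diff) of validating and saving snippet data with the new ScriptSnippetSerializer:

```python
# ModelSerializer with fields="__all__", so name/code/shell/desc are accepted.
from scripts.serializers import ScriptSnippetSerializer

s = ScriptSnippetSerializer(data={"name": "hdr", "code": "echo hi", "shell": "cmd"})
s.is_valid(raise_exception=True)
snippet = s.save()  # persists a ScriptSnippet row
```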
@@ -1,12 +1,16 @@
|
||||
import asyncio
|
||||
|
||||
from agents.models import Agent
|
||||
from packaging import version as pyver
|
||||
|
||||
from agents.models import Agent, AgentHistory
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
|
||||
@app.task
|
||||
def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None:
|
||||
def handle_bulk_command_task(
|
||||
agentpks, cmd, shell, timeout, username, run_on_offline=False
|
||||
) -> None:
|
||||
nats_data = {
|
||||
"func": "rawcmd",
|
||||
"timeout": timeout,
|
||||
@@ -16,20 +20,31 @@ def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None:
|
||||
},
|
||||
}
|
||||
for agent in Agent.objects.filter(pk__in=agentpks):
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type="cmd_run",
|
||||
command=cmd,
|
||||
username=username,
|
||||
)
|
||||
nats_data["id"] = hist.pk
|
||||
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
|
||||
|
||||
@app.task
|
||||
def handle_bulk_script_task(scriptpk, agentpks, args, timeout) -> None:
|
||||
def handle_bulk_script_task(scriptpk, agentpks, args, timeout, username) -> None:
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
nats_data = {
|
||||
"func": "runscript",
|
||||
"timeout": timeout,
|
||||
"script_args": args,
|
||||
"payload": {
|
||||
"code": script.code,
|
||||
"shell": script.shell,
|
||||
},
|
||||
}
|
||||
for agent in Agent.objects.filter(pk__in=agentpks):
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
history_pk = 0
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type="script_run",
|
||||
script=script,
|
||||
username=username,
|
||||
)
|
||||
history_pk = hist.pk
|
||||
agent.run_script(
|
||||
scriptpk=script.pk, args=args, timeout=timeout, history_pk=history_pk
|
||||
)
|
||||
|
||||
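For reference, a hedged example of queuing the updated bulk-script task with the new username argument; the script/agent objects and args are placeholders:

```python
# Celery invocation sketch; username is now recorded in AgentHistory for
# agents on version 1.6.0 or newer, per the task body above.
from scripts.tasks import handle_bulk_script_task

handle_bulk_script_task.delay(
    scriptpk=script.pk,               # placeholder Script
    agentpks=[a.pk for a in agents],  # placeholder list of Agent pks
    args=["-Verbose"],
    timeout=300,
    username=request.user.username,   # new parameter in this diff
)
```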
@@ -1,15 +1,18 @@
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.files.uploadedfile import SimpleUploadedFile
|
||||
from model_bakery import baker
|
||||
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import Script
|
||||
from .serializers import ScriptSerializer, ScriptTableSerializer
|
||||
from .models import Script, ScriptSnippet
|
||||
from .serializers import (
|
||||
ScriptSerializer,
|
||||
ScriptTableSerializer,
|
||||
ScriptSnippetSerializer,
|
||||
)
|
||||
|
||||
|
||||
class TestScriptViews(TacticalTestCase):
|
||||
@@ -18,7 +21,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
self.authenticate()
|
||||
|
||||
def test_get_scripts(self):
|
||||
url = "/scripts/scripts/"
|
||||
url = "/scripts/"
|
||||
scripts = baker.make("scripts.Script", _quantity=3)
|
||||
|
||||
serializer = ScriptTableSerializer(scripts, many=True)
|
||||
@@ -29,14 +32,14 @@ class TestScriptViews(TacticalTestCase):
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_script(self):
|
||||
url = f"/scripts/scripts/"
|
||||
url = f"/scripts/"
|
||||
|
||||
data = {
|
||||
"name": "Name",
|
||||
"description": "Description",
|
||||
"shell": "powershell",
|
||||
"category": "New",
|
||||
"code": "Some Test Code\nnew Line",
|
||||
"code_base64": "VGVzdA==", # Test
|
||||
"default_timeout": 99,
|
||||
"args": ["hello", "world", r"{{agent.public_ip}}"],
|
||||
"favorite": False,
|
||||
@@ -46,47 +49,24 @@ class TestScriptViews(TacticalTestCase):
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(Script.objects.filter(name="Name").exists())
|
||||
self.assertEqual(Script.objects.get(name="Name").code, data["code"])
|
||||
|
||||
# test with file upload
|
||||
# file with 'Test' as content
|
||||
file = SimpleUploadedFile(
|
||||
"test_script.bat", b"\x54\x65\x73\x74", content_type="text/plain"
|
||||
)
|
||||
data = {
|
||||
"name": "New Name",
|
||||
"description": "Description",
|
||||
"shell": "cmd",
|
||||
"category": "New",
|
||||
"filename": file,
|
||||
"default_timeout": 4455,
|
||||
"args": json.dumps(
|
||||
["hello", "world", r"{{agent.public_ip}}"]
|
||||
), # simulate javascript's JSON.stringify() for formData
|
||||
}
|
||||
|
||||
# test with file upload
|
||||
resp = self.client.post(url, data, format="multipart")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
script = Script.objects.filter(name="New Name").first()
|
||||
self.assertEquals(script.code, "Test")
|
||||
self.assertEqual(Script.objects.get(name="Name").code, "Test")
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_modify_script(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.put("/scripts/500/script/", format="json")
|
||||
resp = self.client.put("/scripts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# make a userdefined script
|
||||
script = baker.make_recipe("scripts.script")
|
||||
url = f"/scripts/{script.pk}/script/"
|
||||
url = f"/scripts/{script.pk}/"
|
||||
|
||||
data = {
|
||||
"name": script.name,
|
||||
"description": "Description Change",
|
||||
"shell": script.shell,
|
||||
"code": "Test Code\nAnother Line",
|
||||
"code_base64": "VGVzdA==", # Test
|
||||
"default_timeout": 13344556,
|
||||
}
|
||||
|
||||
@@ -95,16 +75,18 @@ class TestScriptViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
script = Script.objects.get(pk=script.pk)
|
||||
self.assertEquals(script.description, "Description Change")
|
||||
self.assertEquals(script.code, "Test Code\nAnother Line")
|
||||
self.assertEquals(script.code, "Test")
|
||||
|
||||
# test edit a builtin script
|
||||
|
||||
data = {"name": "New Name", "description": "New Desc", "code": "Some New Code"}
|
||||
data = {
|
||||
"name": "New Name",
|
||||
"description": "New Desc",
|
||||
"code_base64": "VGVzdA==",
|
||||
} # Test
|
||||
builtin_script = baker.make_recipe("scripts.script", script_type="builtin")
|
||||
|
||||
resp = self.client.put(
|
||||
f"/scripts/{builtin_script.pk}/script/", data, format="json"
|
||||
)
|
||||
resp = self.client.put(f"/scripts/{builtin_script.pk}/", data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
data = {
|
||||
@@ -112,13 +94,11 @@ class TestScriptViews(TacticalTestCase):
|
||||
"description": "Description Change",
|
||||
"shell": script.shell,
|
||||
"favorite": True,
|
||||
"code": "Test Code\nAnother Line",
|
||||
"code_base64": "VGVzdA==", # Test
|
||||
"default_timeout": 54345,
|
||||
}
|
||||
# test marking a builtin script as favorite
|
||||
resp = self.client.put(
|
||||
f"/scripts/{builtin_script.pk}/script/", data, format="json"
|
||||
)
|
||||
resp = self.client.put(f"/scripts/{builtin_script.pk}/", data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(Script.objects.get(pk=builtin_script.pk).favorite)
|
||||
|
||||
@@ -126,11 +106,11 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
def test_get_script(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.get("/scripts/500/script/", format="json")
|
||||
resp = self.client.get("/scripts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
script = baker.make("scripts.Script")
|
||||
url = f"/scripts/{script.pk}/script/" # type: ignore
|
||||
url = f"/scripts/{script.pk}/" # type: ignore
|
||||
serializer = ScriptSerializer(script)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -138,14 +118,34 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_test_script(self, run_script):
|
||||
url = "/scripts/testscript/"
|
||||
|
||||
run_script.return_value = "return value"
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
data = {
|
||||
"agent": agent.pk,
|
||||
"code": "some_code",
|
||||
"timeout": 90,
|
||||
"args": [],
|
||||
"shell": "powershell",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, "return value") # type: ignore
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_delete_script(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.delete("/scripts/500/script/", format="json")
|
||||
resp = self.client.delete("/scripts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test delete script
|
||||
script = baker.make_recipe("scripts.script")
|
||||
url = f"/scripts/{script.pk}/script/"
|
||||
url = f"/scripts/{script.pk}/"
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
@@ -153,7 +153,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
# test delete community script
|
||||
script = baker.make_recipe("scripts.script", script_type="builtin")
|
||||
url = f"/scripts/{script.pk}/script/"
|
||||
url = f"/scripts/{script.pk}/"
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
@@ -161,7 +161,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
def test_download_script(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.get("/scripts/500/download/", format="json")
|
||||
resp = self.client.get("/scripts/download/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# return script code property should be "Test"
|
||||
@@ -170,7 +170,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
script = baker.make(
|
||||
"scripts.Script", code_base64="VGVzdA==", shell="powershell"
|
||||
)
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
url = f"/scripts/download/{script.pk}/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -178,7 +178,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
# test batch file
|
||||
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="cmd")
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
url = f"/scripts/download/{script.pk}/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -186,7 +186,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
# test python file
|
||||
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="python")
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
url = f"/scripts/download/{script.pk}/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -497,3 +497,106 @@ class TestScriptViews(TacticalTestCase):
|
||||
["-Parameter", "-Another $True"],
|
||||
Script.parse_script_args(agent=agent, shell="powershell", args=args),
|
||||
)
|
||||
|
||||
|
||||
class TestScriptSnippetViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
self.authenticate()
|
||||
|
||||
def test_get_script_snippets(self):
|
||||
url = "/scripts/snippets/"
|
||||
snippets = baker.make("scripts.ScriptSnippet", _quantity=3)
|
||||
|
||||
serializer = ScriptSnippetSerializer(snippets, many=True)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(serializer.data, resp.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_script_snippet(self):
|
||||
url = f"/scripts/snippets/"
|
||||
|
||||
data = {
|
||||
"name": "Name",
|
||||
"description": "Description",
|
||||
"shell": "powershell",
|
||||
"code": "Test",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(ScriptSnippet.objects.filter(name="Name").exists())
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_modify_script_snippet(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.put("/scripts/snippets/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# make a userdefined script
|
||||
snippet = baker.make("scripts.ScriptSnippet", name="Test")
|
||||
url = f"/scripts/snippets/{snippet.pk}/" # type: ignore
|
||||
|
||||
data = {"name": "New Name"} # type: ignore
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
snippet = ScriptSnippet.objects.get(pk=snippet.pk) # type: ignore
|
||||
self.assertEquals(snippet.name, "New Name")
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_get_script_snippet(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.get("/scripts/snippets/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
snippet = baker.make("scripts.ScriptSnippet")
|
||||
url = f"/scripts/snippets/{snippet.pk}/" # type: ignore
|
||||
serializer = ScriptSnippetSerializer(snippet)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(serializer.data, resp.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_delete_script_snippet(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.delete("/scripts/snippets/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test delete script snippet
|
||||
snippet = baker.make("scripts.ScriptSnippet")
|
||||
url = f"/scripts/snippets/{snippet.pk}/" # type: ignore
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
self.assertFalse(ScriptSnippet.objects.filter(pk=snippet.pk).exists()) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_snippet_replacement(self):
|
||||
|
||||
snippet1 = baker.make(
|
||||
"scripts.ScriptSnippet", name="snippet1", code="Snippet 1 Code"
|
||||
)
|
||||
snippet2 = baker.make(
|
||||
"scripts.ScriptSnippet", name="snippet2", code="Snippet 2 Code"
|
||||
)
|
||||
|
||||
test_no_snippet = "No Snippets Here"
|
||||
test_with_snippet = "Snippet 1: {{snippet1}}\nSnippet 2: {{snippet2}}"
|
||||
|
||||
# test putting snippet in text
|
||||
result = Script.replace_with_snippets(test_with_snippet)
|
||||
self.assertEqual(
|
||||
result,
|
||||
f"Snippet 1: {snippet1.code}\nSnippet 2: {snippet2.code}", # type:ignore
|
||||
)
|
||||
|
||||
# test text with no snippets
|
||||
result = Script.replace_with_snippets(test_no_snippet)
|
||||
self.assertEqual(result, test_no_snippet)
|
||||
|
||||
@@ -3,7 +3,10 @@ from django.urls import path
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
path("scripts/", views.GetAddScripts.as_view()),
|
||||
path("<int:pk>/script/", views.GetUpdateDeleteScript.as_view()),
|
||||
path("<int:pk>/download/", views.download),
|
||||
path("", views.GetAddScripts.as_view()),
|
||||
path("<int:pk>/", views.GetUpdateDeleteScript.as_view()),
|
||||
path("snippets/", views.GetAddScriptSnippets.as_view()),
|
||||
path("snippets/<int:pk>/", views.GetUpdateDeleteScriptSnippet.as_view()),
|
||||
path("testscript/", views.TestScript.as_view()),
|
||||
path("download/<int:pk>/", views.download),
|
||||
]
|
||||
|
||||
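To summarize the reorganized routes (paths taken from this urls.py and the updated tests; the host and auth header are assumptions for illustration):

```python
# Rough request sketch against the new script URLs; replace host/token with real values.
import requests

BASE = "https://rmm.example.com/scripts"
headers = {"Authorization": "Token <knox-token>"}  # assumed knox token auth

requests.get(f"{BASE}/", headers=headers)              # list/add scripts (was /scripts/scripts/)
requests.get(f"{BASE}/snippets/", headers=headers)     # new snippet endpoints
requests.get(f"{BASE}/download/42/", headers=headers)  # was /scripts/42/download/
requests.post(f"{BASE}/testscript/", headers=headers, json={
    "agent": 1, "code": "Get-Date", "shell": "powershell", "timeout": 90, "args": []
})
```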
@@ -1,64 +1,39 @@
|
||||
import base64
|
||||
import json
|
||||
import asyncio
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from loguru import logger
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.parsers import FileUploadParser
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import Script
|
||||
from .models import Script, ScriptSnippet
|
||||
from .permissions import ManageScriptsPerms
|
||||
from .serializers import ScriptSerializer, ScriptTableSerializer
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from agents.permissions import RunScriptPerms
|
||||
from .serializers import (
|
||||
ScriptSerializer,
|
||||
ScriptTableSerializer,
|
||||
ScriptSnippetSerializer,
|
||||
)
|
||||
|
||||
|
||||
class GetAddScripts(APIView):
|
||||
permission_classes = [IsAuthenticated, ManageScriptsPerms]
|
||||
parser_class = (FileUploadParser,)
|
||||
|
||||
def get(self, request):
|
||||
scripts = Script.objects.all()
|
||||
|
||||
showCommunityScripts = request.GET.get("showCommunityScripts", True)
|
||||
if not showCommunityScripts or showCommunityScripts == "false":
|
||||
scripts = Script.objects.filter(script_type="userdefined")
|
||||
else:
|
||||
scripts = Script.objects.all()
|
||||
|
||||
return Response(ScriptTableSerializer(scripts, many=True).data)
|
||||
|
||||
def post(self, request, format=None):
|
||||
data = {
|
||||
"name": request.data["name"],
|
||||
"category": request.data["category"],
|
||||
"description": request.data["description"],
|
||||
"shell": request.data["shell"],
|
||||
"default_timeout": request.data["default_timeout"],
|
||||
"script_type": "userdefined", # force all uploads to be userdefined. built in scripts cannot be edited by user
|
||||
}
|
||||
def post(self, request):
|
||||
|
||||
# code editor upload
|
||||
if "args" in request.data.keys() and isinstance(request.data["args"], list):
|
||||
data["args"] = request.data["args"]
|
||||
|
||||
# file upload, have to json load it cuz it's formData
|
||||
if "args" in request.data.keys() and "file_upload" in request.data.keys():
|
||||
data["args"] = json.loads(request.data["args"])
|
||||
|
||||
if "favorite" in request.data.keys():
|
||||
data["favorite"] = request.data["favorite"]
|
||||
|
||||
if "filename" in request.data.keys():
|
||||
message_bytes = request.data["filename"].read()
|
||||
data["code_base64"] = base64.b64encode(message_bytes).decode(
|
||||
"ascii", "ignore"
|
||||
)
|
||||
|
||||
elif "code" in request.data.keys():
|
||||
message_bytes = request.data["code"].encode("ascii", "ignore")
|
||||
data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
|
||||
|
||||
serializer = ScriptSerializer(data=data, partial=True)
|
||||
serializer = ScriptSerializer(data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
|
||||
@@ -85,11 +60,6 @@ class GetUpdateDeleteScript(APIView):
|
||||
else:
|
||||
return notify_error("Community scripts cannot be edited.")
|
||||
|
||||
elif "code" in data:
|
||||
message_bytes = data["code"].encode("ascii")
|
||||
data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
|
||||
data.pop("code")
|
||||
|
||||
serializer = ScriptSerializer(data=data, instance=script, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
@@ -107,11 +77,87 @@ class GetUpdateDeleteScript(APIView):
|
||||
return Response(f"{script.name} was deleted!")
|
||||
|
||||
|
||||
class GetAddScriptSnippets(APIView):
|
||||
permission_classes = [IsAuthenticated, ManageScriptsPerms]
|
||||
|
||||
def get(self, request):
|
||||
snippets = ScriptSnippet.objects.all()
|
||||
return Response(ScriptSnippetSerializer(snippets, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
|
||||
serializer = ScriptSnippetSerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("Script snippet was saved successfully")
|
||||
|
||||
|
||||
class GetUpdateDeleteScriptSnippet(APIView):
|
||||
permission_classes = [IsAuthenticated, ManageScriptsPerms]
|
||||
|
||||
def get(self, request, pk):
|
||||
snippet = get_object_or_404(ScriptSnippet, pk=pk)
|
||||
return Response(ScriptSnippetSerializer(snippet).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
snippet = get_object_or_404(ScriptSnippet, pk=pk)
|
||||
|
||||
serializer = ScriptSnippetSerializer(
|
||||
instance=snippet, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("Script snippet was saved successfully")
|
||||
|
||||
def delete(self, request, pk):
|
||||
snippet = get_object_or_404(ScriptSnippet, pk=pk)
|
||||
snippet.delete()
|
||||
|
||||
return Response("Script snippet was deleted successfully")
|
||||
|
||||
|
||||
class TestScript(APIView):
|
||||
permission_classes = [IsAuthenticated, RunScriptPerms]
|
||||
|
||||
def post(self, request):
|
||||
from .models import Script
|
||||
from agents.models import Agent
|
||||
|
||||
agent = get_object_or_404(Agent, pk=request.data["agent"])
|
||||
|
||||
parsed_args = Script.parse_script_args(
|
||||
agent, request.data["shell"], request.data["args"]
|
||||
)
|
||||
|
||||
data = {
|
||||
"func": "runscript",
|
||||
"timeout": request.data["timeout"],
|
||||
"script_args": parsed_args,
|
||||
"payload": {
|
||||
"code": Script.replace_with_snippets(request.data["code"]),
|
||||
"shell": request.data["shell"],
|
||||
},
|
||||
}
|
||||
|
||||
r = asyncio.run(
|
||||
agent.nats_cmd(data, timeout=request.data["timeout"], wait=True)
|
||||
)
|
||||
|
||||
return Response(r)
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([IsAuthenticated, ManageScriptsPerms])
|
||||
def download(request, pk):
|
||||
script = get_object_or_404(Script, pk=pk)
|
||||
|
||||
with_snippets = request.GET.get("with_snippets", True)
|
||||
|
||||
if with_snippets == "false":
|
||||
with_snippets = False
|
||||
|
||||
if script.shell == "powershell":
|
||||
filename = f"{script.name}.ps1"
|
||||
elif script.shell == "cmd":
|
||||
@@ -119,4 +165,9 @@ def download(request, pk):
|
||||
else:
|
||||
filename = f"{script.name}.py"
|
||||
|
||||
return Response({"filename": filename, "code": script.code})
|
||||
return Response(
|
||||
{
|
||||
"filename": filename,
|
||||
"code": script.code if with_snippets else script.code_no_snippets,
|
||||
}
|
||||
)
|
||||
|
||||
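One behavioral note on the download view above, shown as a quick sketch:

```python
# Per the view code: request.GET values are strings, so only the literal "false"
# switches the response to code_no_snippets; anything else keeps full expansion.
#   GET /scripts/download/<pk>/                      -> {"filename": ..., "code": script.code}
#   GET /scripts/download/<pk>/?with_snippets=false  -> {"filename": ..., "code": script.code_no_snippets}
```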
@@ -1,21 +1,16 @@
|
||||
import asyncio
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from loguru import logger
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .permissions import ManageWinSvcsPerms
|
||||
from .serializers import ServicesSerializer
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@api_view()
|
||||
def get_services(request, pk):
|
||||
|
||||
@@ -35,9 +35,13 @@ app.conf.beat_schedule = {
|
||||
"task": "agents.tasks.auto_self_agent_update_task",
|
||||
"schedule": crontab(minute=35, hour="*"),
|
||||
},
|
||||
"monitor-agents": {
|
||||
"task": "agents.tasks.monitor_agents_task",
|
||||
"schedule": crontab(minute="*/7"),
|
||||
"handle-agents": {
|
||||
"task": "agents.tasks.handle_agents_task",
|
||||
"schedule": crontab(minute="*"),
|
||||
},
|
||||
"get-agentinfo": {
|
||||
"task": "agents.tasks.agent_getinfo_task",
|
||||
"schedule": crontab(minute="*"),
|
||||
},
|
||||
"get-wmi": {
|
||||
"task": "agents.tasks.get_wmi_task",
|
||||
@@ -54,10 +58,12 @@ def debug_task(self):
|
||||
@app.on_after_finalize.connect
|
||||
def setup_periodic_tasks(sender, **kwargs):
|
||||
|
||||
from agents.tasks import agent_outages_task
|
||||
from agents.tasks import agent_outages_task, agent_checkin_task
|
||||
from alerts.tasks import unsnooze_alerts
|
||||
from core.tasks import core_maintenance_tasks
|
||||
from core.tasks import core_maintenance_tasks, cache_db_fields_task
|
||||
|
||||
sender.add_periodic_task(45.0, agent_checkin_task.s())
|
||||
sender.add_periodic_task(60.0, agent_outages_task.s())
|
||||
sender.add_periodic_task(60.0 * 30, core_maintenance_tasks.s())
|
||||
sender.add_periodic_task(60.0 * 60, unsnooze_alerts.s())
|
||||
sender.add_periodic_task(90.0, cache_db_fields_task.s())
|
||||
|
||||
@@ -2,6 +2,7 @@ import threading
|
||||
|
||||
from django.conf import settings
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
from ipware import get_client_ip
|
||||
|
||||
request_local = threading.local()
|
||||
|
||||
@@ -67,6 +68,7 @@ class AuditMiddleware:
|
||||
debug_info["view_func"] = view_func.__name__
|
||||
debug_info["view_args"] = view_args
|
||||
debug_info["view_kwargs"] = view_kwargs
|
||||
debug_info["ip"] = request._client_ip
|
||||
|
||||
request_local.debug_info = debug_info
|
||||
|
||||
@@ -83,3 +85,15 @@ class AuditMiddleware:
|
||||
request_local.debug_info = None
|
||||
request_local.username = None
|
||||
return response
|
||||
|
||||
|
||||
class LogIPMiddleware:
|
||||
def __init__(self, get_response):
|
||||
self.get_response = get_response
|
||||
|
||||
def __call__(self, request):
|
||||
client_ip, is_routable = get_client_ip(request)
|
||||
|
||||
request._client_ip = client_ip
|
||||
response = self.get_response(request)
|
||||
return response
|
||||
|
||||
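For illustration (the view name is hypothetical), once LogIPMiddleware is registered (see the MIDDLEWARE change in settings.py below), later middleware and views can read the attribute it sets:

```python
# LogIPMiddleware stores the client IP on the request before the view runs;
# AuditMiddleware above reads it into debug_info["ip"].
from django.http import HttpResponse

def my_view(request):
    client_ip = getattr(request, "_client_ip", None)  # set by LogIPMiddleware
    return HttpResponse(f"client ip: {client_ip}")
```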
@@ -15,23 +15,25 @@ EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
|
||||
AUTH_USER_MODEL = "accounts.User"
|
||||
|
||||
# latest release
|
||||
TRMM_VERSION = "0.6.14"
|
||||
TRMM_VERSION = "0.8.2"
|
||||
|
||||
# bump this version everytime vue code is changed
|
||||
# to alert user they need to manually refresh their browser
|
||||
APP_VER = "0.0.138"
|
||||
APP_VER = "0.0.144"
|
||||
|
||||
# https://github.com/wh1te909/rmmagent
|
||||
LATEST_AGENT_VER = "1.5.8"
|
||||
LATEST_AGENT_VER = "1.6.1"
|
||||
|
||||
MESH_VER = "0.8.60"
|
||||
MESH_VER = "0.9.16"
|
||||
|
||||
NATS_SERVER_VER = "2.3.3"
|
||||
|
||||
# for the update script, bump when need to recreate venv or npm install
|
||||
PIP_VER = "18"
|
||||
NPM_VER = "17"
|
||||
PIP_VER = "21"
|
||||
NPM_VER = "21"
|
||||
|
||||
SETUPTOOLS_VER = "57.0.0"
|
||||
WHEEL_VER = "0.36.2"
|
||||
SETUPTOOLS_VER = "57.4.0"
|
||||
WHEEL_VER = "0.37.0"
|
||||
|
||||
DL_64 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}.exe"
|
||||
DL_32 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}-x86.exe"
|
||||
@@ -109,6 +111,7 @@ MIDDLEWARE = [
|
||||
"django.middleware.security.SecurityMiddleware",
|
||||
"django.contrib.sessions.middleware.SessionMiddleware",
|
||||
"corsheaders.middleware.CorsMiddleware", ##
|
||||
"tacticalrmm.middleware.LogIPMiddleware",
|
||||
"django.middleware.common.CommonMiddleware",
|
||||
"django.middleware.csrf.CsrfViewMiddleware",
|
||||
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
||||
@@ -173,12 +176,23 @@ STATIC_URL = "/static/"
|
||||
STATIC_ROOT = os.path.join(BASE_DIR, "static")
|
||||
STATICFILES_DIRS = [os.path.join(BASE_DIR, "tacticalrmm/static/")]
|
||||
|
||||
|
||||
LOG_CONFIG = {
|
||||
"handlers": [{"sink": os.path.join(LOG_DIR, "debug.log"), "serialize": False}]
|
||||
LOGGING = {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"handlers": {
|
||||
"file": {
|
||||
"level": "ERROR",
|
||||
"class": "logging.FileHandler",
|
||||
"filename": os.path.join(LOG_DIR, "django_debug.log"),
|
||||
}
|
||||
},
|
||||
"loggers": {
|
||||
"django.request": {"handlers": ["file"], "level": "ERROR", "propagate": True}
|
||||
},
|
||||
}
|
||||
|
||||
if "AZPIPELINE" in os.environ:
|
||||
print("PIPELINE")
|
||||
DATABASES = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.postgresql",
|
||||
|
||||
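A brief sketch of the stdlib logging wiring that replaces the loguru LOG_CONFIG above (logger name as configured; calling it directly from app code is just for illustration):

```python
# The LOGGING dict routes django.request records at ERROR and above to
# log/django_debug.log via the "file" handler.
import logging

logger = logging.getLogger("django.request")
logger.error("unhandled error while processing request")  # written to django_debug.log
```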
@@ -1,3 +1,4 @@
|
||||
import uuid
|
||||
from django.test import TestCase, override_settings
|
||||
from model_bakery import baker
|
||||
from rest_framework.authtoken.models import Token
|
||||
@@ -20,6 +21,12 @@ class TacticalTestCase(TestCase):
|
||||
self.client_setup()
|
||||
self.client.force_authenticate(user=self.john)
|
||||
|
||||
User.objects.create_user( # type: ignore
|
||||
username=uuid.uuid4().hex,
|
||||
is_installer_user=True,
|
||||
password=User.objects.make_random_password(60), # type: ignore
|
||||
)
|
||||
|
||||
def setup_agent_auth(self, agent):
|
||||
agent_user = User.objects.create_user(
|
||||
username=agent.agent_id,
|
||||
|
||||
@@ -4,7 +4,8 @@ from unittest.mock import mock_open, patch
|
||||
|
||||
import requests
|
||||
from django.conf import settings
|
||||
from django.test import TestCase, override_settings
|
||||
from django.test import override_settings
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .utils import (
|
||||
bitdays_to_string,
|
||||
@@ -16,7 +17,10 @@ from .utils import (
|
||||
)
|
||||
|
||||
|
||||
class TestUtils(TestCase):
|
||||
class TestUtils(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("requests.post")
|
||||
@patch("__main__.__builtins__.open", new_callable=mock_open)
|
||||
def test_generate_winagent_exe_success(self, m_open, mock_post):
|
||||
@@ -77,7 +81,7 @@ class TestUtils(TestCase):
|
||||
@patch("subprocess.run")
|
||||
def test_run_nats_api_cmd(self, mock_subprocess):
|
||||
ids = ["a", "b", "c"]
|
||||
_ = run_nats_api_cmd("monitor", ids)
|
||||
_ = run_nats_api_cmd("wmi", ids)
|
||||
mock_subprocess.assert_called_once()
|
||||
|
||||
def test_bitdays_to_string(self):
|
||||
|
||||
@@ -15,14 +15,12 @@ from django.conf import settings
|
||||
from django.contrib.auth.models import AnonymousUser
|
||||
from django.http import FileResponse
|
||||
from knox.auth import TokenAuthentication
|
||||
from loguru import logger
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CodeSignToken
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from logs.models import DebugLog
|
||||
from agents.models import Agent
|
||||
|
||||
notify_error = lambda msg: Response(msg, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
@@ -61,7 +59,7 @@ def generate_winagent_exe(
|
||||
)
|
||||
|
||||
try:
|
||||
codetoken = CodeSignToken.objects.first().token
|
||||
codetoken = CodeSignToken.objects.first().token # type:ignore
|
||||
base_url = get_exegen_url() + "/api/v1/winagents/?"
|
||||
params = {
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
@@ -107,7 +105,7 @@ def generate_winagent_exe(
|
||||
break
|
||||
|
||||
if errors:
|
||||
logger.error(errors)
|
||||
DebugLog.error(message=errors)
|
||||
return notify_error(
|
||||
"Something went wrong. Check debug error log for exact error message"
|
||||
)
|
||||
@@ -123,7 +121,7 @@ def generate_winagent_exe(
|
||||
def get_default_timezone():
|
||||
from core.models import CoreSettings
|
||||
|
||||
return pytz.timezone(CoreSettings.objects.first().default_time_zone)
|
||||
return pytz.timezone(CoreSettings.objects.first().default_time_zone) # type:ignore
|
||||
|
||||
|
||||
def get_bit_days(days: list[str]) -> int:
|
||||
@@ -178,28 +176,28 @@ def filter_software(sw: SoftwareList) -> SoftwareList:
|
||||
|
||||
def reload_nats():
|
||||
users = [{"user": "tacticalrmm", "password": settings.SECRET_KEY}]
|
||||
agents = Agent.objects.prefetch_related("user").only("pk", "agent_id")
|
||||
agents = Agent.objects.prefetch_related("user").only(
|
||||
"pk", "agent_id"
|
||||
) # type:ignore
|
||||
for agent in agents:
|
||||
try:
|
||||
users.append(
|
||||
{"user": agent.agent_id, "password": agent.user.auth_token.key}
|
||||
)
|
||||
except:
|
||||
logger.critical(
|
||||
f"{agent.hostname} does not have a user account, NATS will not work"
|
||||
DebugLog.critical(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} does not have a user account, NATS will not work",
|
||||
)
|
||||
|
||||
domain = settings.ALLOWED_HOSTS[0].split(".", 1)[1]
|
||||
cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
|
||||
key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"
|
||||
if hasattr(settings, "CERT_FILE") and hasattr(settings, "KEY_FILE"):
|
||||
if os.path.exists(settings.CERT_FILE) and os.path.exists(settings.KEY_FILE):
|
||||
cert_file = settings.CERT_FILE
|
||||
key_file = settings.KEY_FILE
|
||||
else:
|
||||
cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
|
||||
key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"
|
||||
else:
|
||||
cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
|
||||
key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"
|
||||
|
||||
config = {
|
||||
"tls": {
|
||||
@@ -207,7 +205,7 @@ def reload_nats():
|
||||
"key_file": key_file,
|
||||
},
|
||||
"authorization": {"users": users},
|
||||
"max_payload": 2048576005,
|
||||
"max_payload": 67108864,
|
||||
}
|
||||
|
||||
conf = os.path.join(settings.BASE_DIR, "nats-rmm.conf")
|
||||
@@ -248,21 +246,36 @@ KnoxAuthMiddlewareStack = lambda inner: KnoxAuthMiddlewareInstance(
|
||||
)
|
||||
|
||||
|
||||
def run_nats_api_cmd(mode: str, ids: list[str], timeout: int = 30) -> None:
|
||||
config = {
|
||||
"key": settings.SECRET_KEY,
|
||||
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||
"agents": ids,
|
||||
}
|
||||
with tempfile.NamedTemporaryFile() as fp:
|
||||
def run_nats_api_cmd(mode: str, ids: list[str] = [], timeout: int = 30) -> None:
|
||||
if mode == "wmi":
|
||||
config = {
|
||||
"key": settings.SECRET_KEY,
|
||||
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||
"agents": ids,
|
||||
}
|
||||
else:
|
||||
db = settings.DATABASES["default"]
|
||||
config = {
|
||||
"key": settings.SECRET_KEY,
|
||||
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||
"user": db["USER"],
|
||||
"pass": db["PASSWORD"],
|
||||
"host": db["HOST"],
|
||||
"port": int(db["PORT"]),
|
||||
"dbname": db["NAME"],
|
||||
}
|
||||
|
||||
with tempfile.NamedTemporaryFile(
|
||||
dir="/opt/tactical/tmp" if settings.DOCKER_BUILD else None
|
||||
) as fp:
|
||||
with open(fp.name, "w") as f:
|
||||
json.dump(config, f)
|
||||
|
||||
cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", mode]
|
||||
try:
|
||||
subprocess.run(cmd, capture_output=True, timeout=timeout)
|
||||
subprocess.run(cmd, timeout=timeout)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
DebugLog.error(message=e)
|
||||
|
||||
|
||||
def get_latest_trmm_ver() -> str:
|
||||
@@ -277,15 +290,16 @@ def get_latest_trmm_ver() -> str:
|
||||
if "TRMM_VERSION" in line:
|
||||
return line.split(" ")[2].strip('"')
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
DebugLog.error(message=e)
|
||||
|
||||
return "error"
|
||||
|
||||
|
||||
def replace_db_values(
|
||||
string: str, agent: Agent = None, shell: str = None, quotes=True
|
||||
string: str, instance=None, shell: str = None, quotes=True # type:ignore
|
||||
) -> Union[str, None]:
|
||||
from core.models import CustomField, GlobalKVStore
|
||||
from clients.models import Client, Site
|
||||
|
||||
# split by period if exists. First should be model and second should be property i.e {{client.name}}
|
||||
temp = string.split(".")
|
||||
@@ -293,7 +307,7 @@ def replace_db_values(
|
||||
# check for model and property
|
||||
if len(temp) < 2:
|
||||
# ignore arg since it is invalid
|
||||
return None
|
||||
return ""
|
||||
|
||||
# value is in the global keystore and replace value
|
||||
if temp[0] == "global":
|
||||
@@ -302,30 +316,48 @@ def replace_db_values(
|
||||
|
||||
return f"'{value}'" if quotes else value
|
||||
else:
|
||||
logger.error(
|
||||
f"Couldn't lookup value for: {string}. Make sure it exists in CoreSettings > Key Store"
|
||||
DebugLog.error(
|
||||
log_type="scripting",
|
||||
message=f"{agent.hostname} Couldn't lookup value for: {string}. Make sure it exists in CoreSettings > Key Store", # type:ignore
|
||||
)
|
||||
return None
|
||||
return ""
|
||||
|
||||
if not agent:
|
||||
# agent must be set if not global property
|
||||
return f"There was an error finding the agent: {agent}"
|
||||
if not instance:
|
||||
# instance must be set if not global property
|
||||
return ""
|
||||
|
||||
if temp[0] == "client":
|
||||
model = "client"
|
||||
obj = agent.client
|
||||
if isinstance(instance, Client):
|
||||
obj = instance
|
||||
elif hasattr(instance, "client"):
|
||||
obj = instance.client
|
||||
else:
|
||||
obj = None
|
||||
elif temp[0] == "site":
|
||||
model = "site"
|
||||
obj = agent.site
|
||||
if isinstance(instance, Site):
|
||||
obj = instance
|
||||
elif hasattr(instance, "site"):
|
||||
obj = instance.site
|
||||
else:
|
||||
obj = None
|
||||
elif temp[0] == "agent":
|
||||
model = "agent"
|
||||
obj = agent
|
||||
if isinstance(instance, Agent):
|
||||
obj = instance
|
||||
else:
|
||||
obj = None
|
||||
else:
|
||||
# ignore arg since it is invalid
|
||||
logger.error(
|
||||
f"Not enough information to find value for: {string}. Only agent, site, client, and global are supported."
|
||||
DebugLog.error(
|
||||
log_type="scripting",
|
||||
message=f"{instance} Not enough information to find value for: {string}. Only agent, site, client, and global are supported.",
|
||||
)
|
||||
return None
|
||||
return ""
|
||||
|
||||
if not obj:
|
||||
return ""
|
||||
|
||||
if hasattr(obj, temp[1]):
|
||||
value = f"'{getattr(obj, temp[1])}'" if quotes else getattr(obj, temp[1])
|
||||
@@ -359,19 +391,21 @@ def replace_db_values(
|
||||
|
||||
else:
|
||||
# ignore arg since property is invalid
|
||||
logger.error(
|
||||
f"Couldn't find property on supplied variable: {string}. Make sure it exists as a custom field or a valid agent property"
|
||||
DebugLog.error(
|
||||
log_type="scripting",
|
||||
message=f"{instance} Couldn't find property on supplied variable: {string}. Make sure it exists as a custom field or a valid agent property",
|
||||
)
|
||||
return None
|
||||
return ""
|
||||
|
||||
# log any unhashable type errors
|
||||
if value != None:
|
||||
return value # type: ignore
|
||||
else:
|
||||
logger.error(
|
||||
f"Couldn't lookup value for: {string}. Make sure it exists as a custom field or a valid agent property"
|
||||
DebugLog.error(
|
||||
log_type="scripting",
|
||||
message=f" {instance}({instance.pk}) Couldn't lookup value for: {string}. Make sure it exists as a custom field or a valid agent property",
|
||||
)
|
||||
return None
|
||||
return ""
|
||||
|
||||
|
||||
def format_shell_array(value: list) -> str:
|
||||
|
||||
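Finally, a hedged example of the renamed replace_db_values parameter; the agent object and returned values are placeholders:

```python
# After this change the function accepts an Agent, Client, or Site via `instance`
# and returns "" (instead of None) when a lookup fails.
from tacticalrmm.utils import replace_db_values

replace_db_values(string="client.name", instance=agent, shell="powershell")
# -> "'Example Client'"  (quoted by default; quotes=False returns the raw value)

replace_db_values(string="global.api_key", instance=None, quotes=False)
# -> the keystore value, or "" if the key does not exist
```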
@@ -3,15 +3,12 @@ import datetime as dt
|
||||
import time
|
||||
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
|
||||
from agents.models import Agent
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from logs.models import DebugLog
|
||||
|
||||
|
||||
@app.task
|
||||
@@ -46,7 +43,13 @@ def auto_approve_updates_task():
|
||||
def check_agent_update_schedule_task():
|
||||
# scheduled task that installs updates on agents if enabled
|
||||
agents = Agent.objects.only(
|
||||
"pk", "agent_id", "version", "last_seen", "overdue_time", "offline_time"
|
||||
"pk",
|
||||
"agent_id",
|
||||
"version",
|
||||
"last_seen",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
"has_patches_pending",
|
||||
)
|
||||
online = [
|
||||
i
|
||||
@@ -114,7 +117,11 @@ def check_agent_update_schedule_task():
|
||||
|
||||
if install:
|
||||
# initiate update on agent asynchronously and don't worry about ret code
|
||||
logger.info(f"Installing windows updates on {agent.salt_id}")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="windows_updates",
|
||||
message=f"Installing windows updates on {agent.hostname}",
|
||||
)
|
||||
nats_data = {
|
||||
"func": "installwinupdates",
|
||||
"guids": agent.get_approved_update_guids(),
|
||||
|
||||
@@ -8,7 +8,7 @@ jobs:
   strategy:
     matrix:
       Debian10:
-        AGENT_NAME: "azpipelines-deb10"
+        AGENT_NAME: "az-pipeline-fran"

   pool:
     name: linux-vms
@@ -20,15 +20,18 @@ jobs:
       sudo -u postgres psql -c 'DROP DATABASE IF EXISTS pipeline'
       sudo -u postgres psql -c 'DROP DATABASE IF EXISTS test_pipeline'
       sudo -u postgres psql -c 'CREATE DATABASE pipeline'

+      sudo -u postgres psql -c "SET client_encoding = 'UTF8'" pipeline
+      SETTINGS_FILE="/myagent/_work/1/s/api/tacticalrmm/tacticalrmm/settings.py"
       rm -rf /myagent/_work/1/s/api/env
       cd /myagent/_work/1/s/api
       python3.9 -m venv env
       source env/bin/activate
       cd /myagent/_work/1/s/api/tacticalrmm
-      pip install --no-cache-dir --upgrade pip
-      pip install --no-cache-dir setuptools==54.2.0 wheel==0.36.2
-      pip install --no-cache-dir -r requirements.txt -r requirements-test.txt -r requirements-dev.txt
+      pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org --upgrade pip
+      SETUPTOOLS_VER=$(grep "^SETUPTOOLS_VER" "$SETTINGS_FILE" | awk -F'[= "]' '{print $5}')
+      WHEEL_VER=$(grep "^WHEEL_VER" "$SETTINGS_FILE" | awk -F'[= "]' '{print $5}')
+      pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org setuptools==${SETUPTOOLS_VER} wheel==${WHEEL_VER}
+      pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org -r requirements.txt -r requirements-test.txt -r requirements-dev.txt
     displayName: "Install Python Dependencies"

   - script: |
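The point of the `grep`/`awk` lines is to read the pinned `setuptools` and `wheel` versions straight out of `settings.py`, so the pipeline never drifts from what the app expects. If you want to sanity-check the same values locally, a rough Python equivalent (path relative to the repo root, format assumed to be `NAME = "x.y.z"`) would be:

```python
# Rough local equivalent of the grep/awk extraction above (illustrative only).
import re
from pathlib import Path

settings_text = Path("api/tacticalrmm/tacticalrmm/settings.py").read_text()


def pinned(name: str) -> str:
    match = re.search(rf'^{name}\s*=\s*"([^"]+)"', settings_text, re.MULTILINE)
    if not match:
        raise SystemExit(f"{name} not found in settings.py")
    return match.group(1)


print(pinned("SETUPTOOLS_VER"), pinned("WHEEL_VER"))
```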
@@ -1,6 +1,6 @@
 #!/bin/bash

-SCRIPT_VERSION="13"
+SCRIPT_VERSION="15"
 SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh'

 GREEN='\033[0;32m'
@@ -73,14 +73,14 @@ sudo tar -czvf ${tmp_dir}/nginx/etc-nginx.tar.gz -C /etc/nginx .

 sudo tar -czvf ${tmp_dir}/confd/etc-confd.tar.gz -C /etc/conf.d .

-sudo tar -czvf ${tmp_dir}/redis/etc-redis.tar.gz -C /var/lib/redis/appendonly.aof
+sudo gzip -9 -c /var/lib/redis/appendonly.aof > ${tmp_dir}/redis/appendonly.aof.gz

 sudo cp ${sysd}/rmm.service ${sysd}/celery.service ${sysd}/celerybeat.service ${sysd}/meshcentral.service ${sysd}/nats.service ${tmp_dir}/systemd/
 if [ -f "${sysd}/daphne.service" ]; then
     sudo cp ${sysd}/daphne.service ${tmp_dir}/systemd/
 fi

-cat /rmm/api/tacticalrmm/tacticalrmm/private/log/debug.log | gzip -9 > ${tmp_dir}/rmm/debug.log.gz
+cat /rmm/api/tacticalrmm/tacticalrmm/private/log/django_debug.log | gzip -9 > ${tmp_dir}/rmm/debug.log.gz
 cp /rmm/api/tacticalrmm/tacticalrmm/local_settings.py /rmm/api/tacticalrmm/app.ini ${tmp_dir}/rmm/
 cp /rmm/web/.env ${tmp_dir}/rmm/env
 cp /rmm/api/tacticalrmm/tacticalrmm/private/exe/mesh*.exe ${tmp_dir}/rmm/

@@ -15,6 +15,7 @@ MESH_USER=tactical
 MESH_PASS=tactical
 MONGODB_USER=mongouser
 MONGODB_PASSWORD=mongopass
+MESH_PERSISTENT_CONFIG=0

 # database settings
 POSTGRES_USER=postgres

@@ -9,14 +9,19 @@ set -e
 : "${MONGODB_HOST:=tactical-mongodb}"
 : "${MONGODB_PORT:=27017}"
 : "${NGINX_HOST_IP:=172.20.0.20}"
+: "${MESH_PERSISTENT_CONFIG:=0}"

 mkdir -p /home/node/app/meshcentral-data
+mkdir -p ${TACTICAL_DIR}/tmp

 if [ ! -f "/home/node/app/meshcentral-data/config.json" ] || [[ "${MESH_PERSISTENT_CONFIG}" -eq 0 ]]; then

+    encoded_uri=$(node -p "encodeURI('mongodb://${MONGODB_USER}:${MONGODB_PASSWORD}@${MONGODB_HOST}:${MONGODB_PORT}')")

     mesh_config="$(cat << EOF
 {
   "settings": {
-    "mongodb": "mongodb://${MONGODB_USER}:${MONGODB_PASSWORD}@${MONGODB_HOST}:${MONGODB_PORT}",
+    "mongodb": "${encoded_uri}",
     "Cert": "${MESH_HOST}",
     "TLSOffload": "${NGINX_HOST_IP}",
     "RedirPort": 80,
@@ -54,11 +59,19 @@ EOF

     echo "${mesh_config}" > /home/node/app/meshcentral-data/config.json

 fi

 node node_modules/meshcentral --createaccount ${MESH_USER} --pass ${MESH_PASS} --email example@example.com
 node node_modules/meshcentral --adminaccount ${MESH_USER}

 if [ ! -f "${TACTICAL_DIR}/tmp/mesh_token" ]; then
-    node node_modules/meshcentral --logintokenkey > ${TACTICAL_DIR}/tmp/mesh_token
+    mesh_token=$(node node_modules/meshcentral --logintokenkey)
+
+    if [[ ${#mesh_token} -eq 160 ]]; then
+        echo ${mesh_token} > /opt/tactical/tmp/mesh_token
+    else
+        echo "Failed to generate mesh token. Fix the error and restart the mesh container"
+    fi
 fi

 # wait for nginx container

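The `encodeURI` wrapper exists because MongoDB credentials containing characters that are not valid in a URI would otherwise produce a broken connection string in `config.json`. The container does this with Node's `encodeURI`; the same idea in Python (illustrative only, with a made-up password) is simply percent-encoding the credentials before assembling the URI:

```python
# Illustrative only: percent-encode MongoDB credentials so that passwords with
# reserved or non-ASCII characters do not break the connection string.
from urllib.parse import quote_plus

user = "mongouser"
password = "p@ss word/with:odd chars"  # made-up example value

uri = f"mongodb://{quote_plus(user)}:{quote_plus(password)}@tactical-mongodb:27017"
print(uri)
```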
@@ -1,4 +1,4 @@
-FROM nats:2.2.6-alpine
+FROM nats:2.3.3-alpine

 ENV TACTICAL_DIR /opt/tactical
 ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready

@@ -1,5 +1,5 @@
 # creates python virtual env
-FROM python:3.9.2-slim AS CREATE_VENV_STAGE
+FROM python:3.9.6-slim AS CREATE_VENV_STAGE

 ARG DEBIAN_FRONTEND=noninteractive

@@ -24,7 +24,7 @@ RUN apt-get update && \


 # runtime image
-FROM python:3.9.2-slim
+FROM python:3.9.6-slim

 # set env variables
 ENV VIRTUAL_ENV /opt/venv

@@ -124,6 +124,7 @@ EOF
 python manage.py load_chocos
 python manage.py load_community_scripts
 python manage.py reload_nats
+python manage.py create_installer_user

 # create super user
 echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell

@@ -97,6 +97,7 @@ services:
       MESH_PASS: ${MESH_PASS}
       MONGODB_USER: ${MONGODB_USER}
       MONGODB_PASSWORD: ${MONGODB_PASSWORD}
+      MESH_PERSISTENT_CONFIG: ${MESH_PERSISTENT_CONFIG}
     networks:
       proxy:
         aliases:

@@ -3,6 +3,7 @@
 set -o errexit
 set -o pipefail

 # tactical tactical-frontend tactical-nats tactical-nginx
 DOCKER_IMAGES="tactical tactical-frontend tactical-nats tactical-nginx tactical-meshcentral"

 cd ..

46 docs/docs/3rdparty_anydesk.md Normal file
@@ -0,0 +1,46 @@

# AnyDesk

## AnyDesk Integration

!!!info
    You can set up a full automation policy to collect the AnyNetID, but this example collects it from just one agent for testing purposes.

From the UI go to **Settings > Global Settings > CUSTOM FIELDS > Agents**

Add Custom Field</br>
**Target** = `Agent`</br>
**Name** = `AnyNetID`</br>
**Field Type** = `Text`</br>



While in Global Settings go to **URL ACTIONS**

Add a URL Action</br>
**Name** = `AnyDesk Control`</br>
**Description** = `Connect to an AnyDesk Session`</br>
**URL Pattern** =

```html
anydesk:{{agent.AnyNetID}}
```

Navigate to an agent with AnyDesk running (or apply using **Settings > Automation Manager**).</br>
Go to Tasks.</br>
Add Task</br>
**Select Script** = `AnyDesk - Get AnyNetID for client` (this is a builtin script from the script library)</br>
**Descriptive name of task** = `Collects the AnyNetID for AnyDesk.`</br>
**Collector Task** = `CHECKED`</br>
**Custom Field to update** = `AnyNetID`</br>



Click **Next**</br>
Check **Manual**</br>
Click **Add Task**

Right click on the newly created task and click **Run Task Now**.

Give it a second to execute, then right click the agent that you are working with and go to **Run URL Action > AnyDesk Control**.

It launches the session in AnyDesk.
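For the curious: a collector script only has to print the value to stdout, and the collector task writes that output into the `AnyNetID` custom field. The builtin script already handles this; a hypothetical Python-type equivalent (assuming AnyDesk's `--get-id` switch and its default install path) could look like:

```python
# Hypothetical collector sketch (the script library already ships a supported one):
# print the local AnyNetID so the collector task can store it in the AnyNetID field.
import subprocess

ANYDESK = r"C:\Program Files (x86)\AnyDesk\AnyDesk.exe"  # assumed default install path

result = subprocess.run([ANYDESK, "--get-id"], capture_output=True, text=True, timeout=30)
print(result.stdout.strip())
```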
34 docs/docs/3rdparty_bitdefender_gravityzone.md Normal file
@@ -0,0 +1,34 @@

# BitDefender GravityZone Deployment

## How to Deploy BitDefender GravityZone

From the UI go to **Settings > Global Settings > CUSTOM FIELDS > Clients**

Add a Custom Field</br>

First: </br>
**Target** = `CLIENTS`</br>
**Name** = `bdurl`</br>
**Field Type** = `Text`</br>



Log into your GravityZone and on the left hand side, select "Packages" under "Network".



Select the client you are working with and click "Send Download Links" at the top.</br>



Copy the appropriate download link



Paste the download link into the `bdurl` custom field by right-clicking your target client's name in the RMM.



Right click the Agent you want to deploy to and **Run Script**. Select **BitDefender GravityZone Install** and set the timeout to 1800 seconds.

**Install time will vary based on internet speed and on any other AV removal performed by the BitDefender BEST deployment.**
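To see roughly what happens during that script run: the download link lives in the client's `bdurl` field, and the install script fetches and launches the package. A stripped-down, hypothetical sketch of that flow (not the bundled script; it assumes the link is passed as the first script argument, e.g. `{{client.bdurl}}`, and that your package needs no extra switches):

```python
# Hypothetical sketch only (use the bundled "BitDefender GravityZone Install"
# script in practice): download the BEST package from the supplied link and run it.
import subprocess
import sys
import tempfile
import urllib.request
from pathlib import Path

url = sys.argv[1]  # e.g. {{client.bdurl}} passed as a script argument
installer = Path(tempfile.gettempdir()) / "bd_installer.exe"
urllib.request.urlretrieve(url, installer)

# installs can be slow, so mirror the 1800 second task timeout; add any
# vendor-specific silent switches your package requires.
subprocess.run([str(installer)], timeout=1800, check=True)
```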
9 docs/docs/3rdparty_grafana.md Normal file
@@ -0,0 +1,9 @@

# Adding Grafana to Tactical RMM

Adding graphical Dashboards to Tactical.

See <https://github.com/dinger1986/TRMM-Grafana>



103 docs/docs/3rdparty_screenconnect.md Normal file
@@ -0,0 +1,103 @@

# ScreenConnect / ConnectWise Control

## ConnectWise Control Integration

!!!info
    To make this work you will need the name of the ScreenConnect Client service from one of your agents running the ScreenConnect Guest.

!!!info
    You can set up a full automation policy to collect the machine GUID, but this example collects it from just one agent for testing purposes.

From the UI go to **Settings > Global Settings > CUSTOM FIELDS > Agents**

Add Custom Field</br>
**Target** = `Client`</br>
**Name** = `ScreenConnectService`</br>
**Field Type** = `Text`</br>
**Default Value** = `The name of your SC Service eg. ScreenConnect Client (XXXXXXXXXXXXXXXXX)`</br>



Add Custom Field</br>
**Target** = `Agent`</br>
**Name** = `ScreenConnectGUID`</br>
**Field Type** = `Text`</br>



While in Global Settings go to **URL ACTIONS**

Add a URL Action</br>
**Name** = `ScreenConnect`</br>
**Description** = `Launch ScreenConnect Session`</br>
**URL Pattern** =

```html
https://<your_screenconnect_fqdn_with_port>/Host#Access/All%20Machines//{{agent.ScreenConnectGUID}}/Join
```



Navigate to an agent with the ConnectWise Control service running (or apply using **Settings > Automation Manager**).</br>
Go to Tasks.</br>
Add Task</br>
**Select Script** = `ScreenConnect - Get GUID for client` (this is a builtin script from the script library)</br>
**Script argument** = `-serviceName {{client.ScreenConnectService}}`</br>
**Descriptive name of task** = `Collects the Machine GUID for ScreenConnect.`</br>
**Collector Task** = `CHECKED`</br>
**Custom Field to update** = `ScreenConnectGUID`</br>



Click **Next**</br>
Check **Manual**</br>
Click **Add Task**

Right click on the newly created task and click **Run Task Now**.

Give it a second to execute, then right click the agent that you are working with and go to **Run URL Action > ScreenConnect**.

It should ask you to sign in to your ConnectWise Control server if you are not already logged in, and then launch the session.

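If you are wondering where the GUID comes from: the ScreenConnect access client registers a Windows service whose `ImagePath` carries its connection parameters, and the session GUID is typically the `s=` parameter in that string. A hypothetical Python-type sketch of that lookup (this is an assumption about where the GUID lives, not a description of the builtin script):

```python
# Hypothetical sketch (use the builtin "ScreenConnect - Get GUID for client" script
# in practice): read the service's ImagePath and pull out the "s=" session GUID.
import re
import winreg

# value of your ScreenConnectService custom field, e.g.
# "ScreenConnect Client (XXXXXXXXXXXXXXXXX)"
SERVICE_NAME = "ScreenConnect Client (XXXXXXXXXXXXXXXXX)"

with winreg.OpenKey(
    winreg.HKEY_LOCAL_MACHINE, rf"SYSTEM\CurrentControlSet\Services\{SERVICE_NAME}"
) as key:
    image_path, _ = winreg.QueryValueEx(key, "ImagePath")

match = re.search(r"[?&]s=([0-9a-fA-F-]{36})", image_path)
if match:
    print(match.group(1))  # collector tasks store stdout in the custom field
else:
    raise SystemExit("session GUID not found in ImagePath")
```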
*****

## Install Tactical RMM via ScreenConnect commands window

1. Create a Deployment under **Agents > Manage Deployments**
2. Replace `<deployment URL>` below with your Deployment Download Link.

**x64**

```cmd
#!ps
#maxlength=500000
#timeout=600000

Invoke-WebRequest "<deployment URL>" -OutFile ( New-Item -Path "C:\temp\trmminstallx64.exe" -Force )
$proc = Start-Process c:\temp\trmminstallx64.exe -ArgumentList '-silent' -PassThru
Wait-Process -InputObject $proc

if ($proc.ExitCode -ne 0) {
    Write-Warning "trmminstallx64.exe exited with status code $($proc.ExitCode)"
}
Remove-Item -Path "c:\temp\trmminstallx64.exe" -Force
```

**x86**

```cmd
#!ps
#maxlength=500000
#timeout=600000

Invoke-WebRequest "<deployment URL>" -OutFile ( New-Item -Path "C:\temp\trmminstallx86.exe" -Force )
$proc = Start-Process c:\temp\trmminstallx86.exe -ArgumentList '-silent' -PassThru
Wait-Process -InputObject $proc

if ($proc.ExitCode -ne 0) {
    Write-Warning "trmminstallx86.exe exited with status code $($proc.ExitCode)"
}
Remove-Item -Path "c:\temp\trmminstallx86.exe" -Force
```

*****
46 docs/docs/3rdparty_teamviewer.md Normal file
@@ -0,0 +1,46 @@

# TeamViewer

## TeamViewer Integration

!!!info
    You can set up a full automation policy to collect the TeamViewer ClientID, but this example collects it from just one agent for testing purposes.

From the UI go to **Settings > Global Settings > CUSTOM FIELDS > Agents**

Add Custom Field</br>
**Target** = `Agent`</br>
**Name** = `TeamViewerClientID`</br>
**Field Type** = `Text`</br>



While in Global Settings go to **URL ACTIONS**

Add a URL Action</br>
**Name** = `TeamViewer Control`</br>
**Description** = `Connect to a TeamViewer Session`</br>
**URL Pattern** =

```html
https://start.teamviewer.com/device/{{agent.TeamViewerClientID}}/authorization/password/mode/control
```

Navigate to an agent with TeamViewer running (or apply using **Settings > Automation Manager**).</br>
Go to Tasks.</br>
Add Task</br>
**Select Script** = `TeamViewer - Get ClientID for client` (this is a builtin script from the script library)</br>
**Descriptive name of task** = `Collects the ClientID for TeamViewer.`</br>
**Collector Task** = `CHECKED`</br>
**Custom Field to update** = `TeamViewerClientID`</br>



Click **Next**</br>
Check **Manual**</br>
Click **Add Task**

Right click on the newly created task and click **Run Task Now**.

Give it a second to execute, then right click the agent that you are working with and go to **Run URL Action > TeamViewer Control**.

It launches the session and may prompt for a password in TeamViewer.
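As with the other integrations, the collector script only needs to print the ID so the task can store it in `TeamViewerClientID`. A hypothetical Python-type sketch (assuming TeamViewer keeps its numeric ID in the usual `ClientID` registry value; the builtin script is the supported route):

```python
# Hypothetical collector sketch (the script library already ships a supported one):
# read TeamViewer's ClientID from the registry and print it for the collector task.
import winreg

for key_path in (r"SOFTWARE\TeamViewer", r"SOFTWARE\WOW6432Node\TeamViewer"):
    try:
        with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key_path) as key:
            client_id, _ = winreg.QueryValueEx(key, "ClientID")
            print(client_id)
            break
    except OSError:
        continue
else:
    raise SystemExit("TeamViewer ClientID not found in the registry")
```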
Some files were not shown because too many files have changed in this diff.