Compare commits
217 Commits
Commit SHA1s:

9011148adf, 897d0590d2, 33b33e8458, 7758f5c187, a9a0df9699, 216a9ed035, 35d61b6a6c, 5fb72cea53,
d54d021e9f, 06e78311df, df720f95ca, 00faff34d3, 2b5b3ea4f3, 95e608d0b4, 1d55bf87dd, 1220ce53eb,
2006218f87, 40f427a387, 445e95baed, 67fbc9ad33, 1253e9e465, 21069432e8, 6facf6a324, 7556197485,
8dddd2d896, f319c95c2b, 8e972b0907, 395e400215, 3685e3111f, 7bb1c75dc6, b20834929c, 181891757e,
b16feeae44, 684e049f27, 8cebd901b2, 3c96beb8fb, 8a46459cf9, be5c3e9daa, e44453877c, f772a4ec56,
44182ec683, b9ab13fa53, 2ad6721c95, b7d0604e62, a7518b4b26, 50613f5d3e, f814767703, 4af86d6456,
f0a4f00c2d, 4321affddb, 926ed55b9b, 2ebf308565, 1c5e736dce, b591f9f5b7, 9724882578, ddef2df101,
8af69c4284, 6ebe1ab467, 24e4d9cf6d, f35fa0aa58, 4942f262f1, a20b1a973e, eae5e00706, 403762d862,
5c92d4b454, 38179b9d38, 8f510dde5a, be42d56e37, 6294530fa3, c5c8f5fab1, 3d41d79078, 3005061a11,
65ea46f457, eca8f32570, 8d1ef19c61, 71d87d866b, c4f88bdce7, f722a115b1, 1583beea7b, 5b388c587b,
e254923167, b0dbdd7803, aa6ebe0122, c5f179bab8, e65cb86638, a349998640, 43f60610b8, 46d042087a,
ee214727f6, b4c1ec55ec, 0fdd54f710, 4f0cdeaec0, e5cc38857c, fe4b9d71c0, 5c1181e40e, 8b71832bc2,
8412ed6065, 207f6cdc7c, b0b51f5730, def6833ef0, c528dd3de1, 544270e35d, 657e029fee, 49469d7689,
4f0dd452c8, 3f741eab11, 190368788f, 8306a3f566, 988c134c09, af0a4d578b, 9bc0abc831, 41410e99e7,
deae04d5ff, 7d6eeffd66, 629858e095, dfdb628347, 6e48b28fc9, 3ba450e837, 688ed93500, 7268ba20a2,
63d9e73098, 564c048f90, 5f801c74d5, b405fbc09a, 7a64c2eb49, c93cbac3b1, 8b0f67b8a6, 0d96129f2d,
54ee12d2b3, 92fc042103, 9bb7016fa7, 3ad56feafb, 14d59c3dec, 443f419770, ddbb58755e, 524283b9ff,
fb178d2944, 52f4ad9403, ba0c08ef1f, 9e19b1e04c, b2118201b1, b4346aa056, b599f05aab, 93d78a0200,
449957b2eb, 0a6d44bad3, 17ceaaa503, d70803b416, aa414d4702, f24e1b91ea, 1df8163090, 659ddf6a45,
e110068da4, c943f6f936, cb1fe7fe54, 593f1f63cc, 66aa70cf75, 304be99067, 9a01ec35f4, bfa5b4fba5,
d2f63ef353, 50f334425e, f78212073c, 5c655f5a82, 6a6446bfcb, b60a3a5e50, 02ccbab8e5, 023ff3f964,
7c5e8df3b8, 56fdab260b, 7cce49dc1a, 2dfaafb20b, 6138a5bf54, 828c67cc00, e70cd44e18, efa5ac5edd,
788b11e759, d049d7a61f, 075c833b58, e9309c2a96, a592d2b397, 3ad1805ac0, dbc2bab698, 79eec5c299,
7754b0c575, be4289ce76, 67f5226270, b6d77c581b, d84bf47d04, aba3a7bb9e, 6281736d89, 94d96f89d3,
4b55f9dead, 5c6dce94df, f7d8f9c7f5, 053df24f9c, 1dc470e434, cfd8773267, 67045cf6c1, ddfb9e7239,
9f6eed5472, 15a1e2ebcb, fcfe450b07, a69bbb3bc9, 6d2559cfc1, b3a62615f3, 57f5cca1cb, 6b9851f540,
36fd203a88, 3f5cb5d61c, 862fc6a946, 92c386ac0e, 98a11a3645, 62be0ed936, b7de73fd8a, e2413f1af2,
0e77d575c4
@@ -25,7 +25,8 @@ POSTGRES_PASS=postgrespass
# DEV SETTINGS
APP_PORT=80
API_PORT=80
API_PROTOCOL=https://
HTTP_PROTOCOL=https
DOCKER_NETWORK="172.21.0.0/24"
DOCKER_NGINX_IP="172.21.0.20"
NATS_PORTS="4222:4222"
DOCKER_NETWORK=172.21.0.0/24
DOCKER_NGINX_IP=172.21.0.20
NATS_PORTS=4222:4222
@@ -1,4 +1,4 @@
FROM python:3.9.2-slim
FROM python:3.9.6-slim

ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready

@@ -13,12 +13,17 @@ EXPOSE 8000 8383 8005
RUN groupadd -g 1000 tactical && \
useradd -u 1000 -g 1000 tactical

# Copy Dev python reqs
COPY ./requirements.txt /
# Copy nats-api file
COPY natsapi/bin/nats-api /usr/local/bin/
RUN chmod +x /usr/local/bin/nats-api

# Copy Docker Entrypoint
COPY ./entrypoint.sh /
# Copy dev python reqs
COPY .devcontainer/requirements.txt /

# Copy docker entrypoint.sh
COPY .devcontainer/entrypoint.sh /
RUN chmod +x /entrypoint.sh

ENTRYPOINT ["/entrypoint.sh"]

WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm
@@ -6,8 +6,8 @@ services:
image: api-dev
restart: always
build:
context: .
dockerfile: ./api.dockerfile
context: ..
dockerfile: .devcontainer/api.dockerfile
command: ["tactical-api"]
environment:
API_PORT: ${API_PORT}

@@ -127,9 +127,6 @@ services:
init-dev:
container_name: trmm-init-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
restart: on-failure
command: ["tactical-init-dev"]
environment:

@@ -156,9 +153,6 @@ services:
celery-dev:
container_name: trmm-celery-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-celery-dev"]
restart: always
networks:

@@ -174,9 +168,6 @@ services:
celerybeat-dev:
container_name: trmm-celerybeat-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-celerybeat-dev"]
restart: always
networks:

@@ -192,9 +183,6 @@ services:
websockets-dev:
container_name: trmm-websockets-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-websockets-dev"]
restart: always
networks:

@@ -221,6 +209,8 @@ services:
CERT_PRIV_KEY: ${CERT_PRIV_KEY}
APP_PORT: ${APP_PORT}
API_PORT: ${API_PORT}
API_PROTOCOL: ${API_PROTOCOL}
DEV: 1
networks:
dev:
ipv4_address: ${DOCKER_NGINX_IP}

@@ -234,9 +224,6 @@ services:
container_name: trmm-mkdocs-dev
image: api-dev
restart: always
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-mkdocs-dev"]
ports:
- "8005:8005"
@@ -78,24 +78,6 @@ DATABASES = {
}
}

REST_FRAMEWORK = {
'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',

'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'knox.auth.TokenAuthentication',
),
}

if not DEBUG:
REST_FRAMEWORK.update({
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
})

MESH_USERNAME = '${MESH_USER}'
MESH_SITE = 'https://${MESH_HOST}'
MESH_TOKEN_KEY = '${MESH_TOKEN}'
@@ -3,6 +3,7 @@ asyncio-nats-client
celery
channels
channels_redis
django-ipware
Django
django-cors-headers
django-rest-knox
.gitignore (vendored)
@@ -48,3 +48,4 @@ nats-rmm.conf
.mypy_cache
docs/site/
reset_db.sh
run_go_cmd.py
@@ -9,7 +9,7 @@ Tactical RMM is a remote monitoring & management tool for Windows computers, bui
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)

# [LIVE DEMO](https://rmm.tacticalrmm.io/)
Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.

### [Discord Chat](https://discord.gg/upGTkWp)

@@ -35,4 +35,4 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso

## Installation / Backup / Restore / Usage

### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)
### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)
@@ -15,4 +15,5 @@ class Command(BaseCommand):
username=uuid.uuid4().hex,
is_installer_user=True,
password=User.objects.make_random_password(60),  # type: ignore
block_dashboard_login=True,
)
@@ -0,0 +1,18 @@
# Generated by Django 3.2.1 on 2021-07-20 20:26

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('accounts', '0023_user_is_installer_user'),
]

operations = [
migrations.AddField(
model_name='user',
name='last_login_ip',
field=models.GenericIPAddressField(blank=True, default=None, null=True),
),
]
@@ -0,0 +1,33 @@
# Generated by Django 3.2.1 on 2021-07-21 04:24

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('accounts', '0024_user_last_login_ip'),
]

operations = [
migrations.AddField(
model_name='role',
name='created_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='role',
name='created_time',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='role',
name='modified_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='role',
name='modified_time',
field=models.DateTimeField(auto_now=True, null=True),
),
]
@@ -0,0 +1,34 @@
# Generated by Django 3.2.6 on 2021-09-01 12:47

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('accounts', '0025_auto_20210721_0424'),
]

operations = [
migrations.CreateModel(
name='APIKey',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_by', models.CharField(blank=True, max_length=100, null=True)),
('created_time', models.DateTimeField(auto_now_add=True, null=True)),
('modified_by', models.CharField(blank=True, max_length=100, null=True)),
('modified_time', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=25, unique=True)),
('key', models.CharField(blank=True, max_length=48, unique=True)),
('expiration', models.DateTimeField(blank=True, default=None, null=True)),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='role',
name='can_manage_api_keys',
field=models.BooleanField(default=False),
),
]
@@ -0,0 +1,25 @@
# Generated by Django 3.2.6 on 2021-09-03 00:54

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

dependencies = [
('accounts', '0026_auto_20210901_1247'),
]

operations = [
migrations.AddField(
model_name='apikey',
name='user',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='api_key', to='accounts.user'),
preserve_default=False,
),
migrations.AddField(
model_name='user',
name='block_dashboard_login',
field=models.BooleanField(default=False),
),
]
@@ -1,5 +1,6 @@
from django.contrib.auth.models import AbstractUser
from django.db import models
from django.db.models.fields import CharField, DateTimeField

from logs.models import BaseAuditModel

@@ -24,6 +25,7 @@ CLIENT_TREE_SORT_CHOICES = [

class User(AbstractUser, BaseAuditModel):
is_active = models.BooleanField(default=True)
block_dashboard_login = models.BooleanField(default=False)
totp_key = models.CharField(max_length=50, null=True, blank=True)
dark_mode = models.BooleanField(default=True)
show_community_scripts = models.BooleanField(default=True)

@@ -48,6 +50,7 @@ class User(AbstractUser, BaseAuditModel):
loading_bar_color = models.CharField(max_length=255, default="red")
clear_search_when_switching = models.BooleanField(default=True)
is_installer_user = models.BooleanField(default=False)
last_login_ip = models.GenericIPAddressField(default=None, blank=True, null=True)

agent = models.OneToOneField(
"agents.Agent",

@@ -73,7 +76,7 @@ class User(AbstractUser, BaseAuditModel):
return UserSerializer(user).data


class Role(models.Model):
class Role(BaseAuditModel):
name = models.CharField(max_length=255, unique=True)
is_superuser = models.BooleanField(default=False)

@@ -137,9 +140,19 @@ class Role(models.Model):
can_manage_accounts = models.BooleanField(default=False)
can_manage_roles = models.BooleanField(default=False)

# authentication
can_manage_api_keys = models.BooleanField(default=False)

def __str__(self):
return self.name

@staticmethod
def serialize(role):
# serializes the agent and returns json
from .serializers import RoleAuditSerializer

return RoleAuditSerializer(role).data

@staticmethod
def perms():
return [

@@ -178,4 +191,22 @@ class Role(models.Model):
"can_manage_winupdates",
"can_manage_accounts",
"can_manage_roles",
"can_manage_api_keys",
]


class APIKey(BaseAuditModel):
name = CharField(unique=True, max_length=25)
key = CharField(unique=True, blank=True, max_length=48)
expiration = DateTimeField(blank=True, null=True, default=None)
user = models.ForeignKey(
"accounts.User",
related_name="api_key",
on_delete=models.CASCADE,
)

@staticmethod
def serialize(apikey):
from .serializers import APIKeyAuditSerializer

return APIKeyAuditSerializer(apikey).data
@@ -8,6 +8,21 @@ class AccountsPerms(permissions.BasePermission):
if r.method == "GET":
return True

# allow users to reset their own password/2fa see issue #686
base_path = "/accounts/users/"
paths = ["reset/", "reset_totp/"]

if r.path in [base_path + i for i in paths]:
from accounts.models import User

try:
user = User.objects.get(pk=r.data["id"])
except User.DoesNotExist:
pass
else:
if user == r.user:
return True

return _has_perm(r, "can_manage_accounts")


@@ -17,3 +32,9 @@ class RolesPerms(permissions.BasePermission):
return True

return _has_perm(r, "can_manage_roles")


class APIKeyPerms(permissions.BasePermission):
def has_permission(self, r, view):

return _has_perm(r, "can_manage_api_keys")
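The `_has_perm` helper these permission classes call is not part of this compare. For orientation only, here is a rough illustration of the kind of role-flag check it implies, assuming it simply looks up the named boolean on the requesting user's `Role` (the flag names match the `Role.perms()` list above); the project's real helper lives elsewhere and may differ:

```python
# Hypothetical illustration only; the project's actual _has_perm is not shown in this diff.
def _has_perm(request, perm: str) -> bool:
    # superusers (or roles flagged is_superuser) bypass individual checks
    if request.user.is_superuser or (
        request.user.role and getattr(request.user.role, "is_superuser", False)
    ):
        return True

    # otherwise require the named boolean (e.g. "can_manage_api_keys") on the user's role
    role = request.user.role
    return bool(role and getattr(role, perm, False))
```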
@@ -1,7 +1,11 @@
import pyotp
from rest_framework.serializers import ModelSerializer, SerializerMethodField
from rest_framework.serializers import (
ModelSerializer,
SerializerMethodField,
ReadOnlyField,
)

from .models import User, Role
from .models import APIKey, User, Role


class UserUISerializer(ModelSerializer):

@@ -17,6 +21,7 @@ class UserUISerializer(ModelSerializer):
"client_tree_splitter",
"loading_bar_color",
"clear_search_when_switching",
"block_dashboard_login",
]


@@ -31,7 +36,9 @@ class UserSerializer(ModelSerializer):
"email",
"is_active",
"last_login",
"last_login_ip",
"role",
"block_dashboard_login",
]


@@ -57,3 +64,30 @@ class RoleSerializer(ModelSerializer):
class Meta:
model = Role
fields = "__all__"


class RoleAuditSerializer(ModelSerializer):
class Meta:
model = Role
fields = "__all__"


class APIKeySerializer(ModelSerializer):

username = ReadOnlyField(source="user.username")

class Meta:
model = APIKey
fields = "__all__"


class APIKeyAuditSerializer(ModelSerializer):
username = ReadOnlyField(source="user.username")

class Meta:
model = APIKey
fields = [
"name",
"username",
"expiration",
]
@@ -1,10 +1,12 @@
from unittest.mock import patch

from django.test import override_settings

from accounts.models import User
from model_bakery import baker, seq
from accounts.models import User, APIKey
from tacticalrmm.test import TacticalTestCase

from accounts.serializers import APIKeySerializer


class TestAccounts(TacticalTestCase):
def setUp(self):

@@ -39,6 +41,12 @@ class TestAccounts(TacticalTestCase):
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, "ok")

# test user set to block dashboard logins
self.bob.block_dashboard_login = True
self.bob.save()
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)

@patch("pyotp.TOTP.verify")
def test_login_view(self, mock_verify):
url = "/login/"

@@ -288,6 +296,68 @@ class TestUserAction(TacticalTestCase):
self.check_not_authenticated("patch", url)


class TestAPIKeyViews(TacticalTestCase):
def setUp(self):
self.setup_coresettings()
self.authenticate()

def test_get_api_keys(self):
url = "/accounts/apikeys/"
apikeys = baker.make("accounts.APIKey", key=seq("APIKEY"), _quantity=3)

serializer = APIKeySerializer(apikeys, many=True)
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(serializer.data, resp.data)  # type: ignore

self.check_not_authenticated("get", url)

def test_add_api_keys(self):
url = "/accounts/apikeys/"

user = baker.make("accounts.User")
data = {"name": "Name", "user": user.id, "expiration": None}

resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 200)
self.assertTrue(APIKey.objects.filter(name="Name").exists())
self.assertTrue(APIKey.objects.get(name="Name").key)

self.check_not_authenticated("post", url)

def test_modify_api_key(self):
# test a call where api key doesn't exist
resp = self.client.put("/accounts/apikeys/500/", format="json")
self.assertEqual(resp.status_code, 404)

apikey = baker.make("accounts.APIKey", name="Test")
url = f"/accounts/apikeys/{apikey.pk}/"  # type: ignore

data = {"name": "New Name"}  # type: ignore

resp = self.client.put(url, data, format="json")
self.assertEqual(resp.status_code, 200)
apikey = APIKey.objects.get(pk=apikey.pk)  # type: ignore
self.assertEquals(apikey.name, "New Name")

self.check_not_authenticated("put", url)

def test_delete_api_key(self):
# test a call where api key doesn't exist
resp = self.client.delete("/accounts/apikeys/500/", format="json")
self.assertEqual(resp.status_code, 404)

# test delete api key
apikey = baker.make("accounts.APIKey")
url = f"/accounts/apikeys/{apikey.pk}/"  # type: ignore
resp = self.client.delete(url, format="json")
self.assertEqual(resp.status_code, 200)

self.assertFalse(APIKey.objects.filter(pk=apikey.pk).exists())  # type: ignore

self.check_not_authenticated("delete", url)


class TestTOTPSetup(TacticalTestCase):
def setUp(self):
self.authenticate()

@@ -313,3 +383,29 @@ class TestTOTPSetup(TacticalTestCase):
r = self.client.post(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, "totp token already set")


class TestAPIAuthentication(TacticalTestCase):
def setUp(self):
# create User and associate to API Key
self.user = User.objects.create(username="api_user", is_superuser=True)
self.api_key = APIKey.objects.create(
name="Test Token", key="123456", user=self.user
)

self.client_setup()

def test_api_auth(self):
url = "/clients/clients/"
# auth should fail if no header set
self.check_not_authenticated("get", url)

# invalid api key in header should return code 400
self.client.credentials(HTTP_X_API_KEY="000000")
r = self.client.get(url, format="json")
self.assertEqual(r.status_code, 401)

# valid api key in header should return code 200
self.client.credentials(HTTP_X_API_KEY="123456")
r = self.client.get(url, format="json")
self.assertEqual(r.status_code, 200)
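The authentication backend that makes `TestAPIAuthentication` pass is not included in this compare; only its behaviour is visible (the `X-API-KEY` header, a 401 for an unknown key, normal access for a valid one). Below is a minimal sketch of how such a DRF authentication class could look, assuming the class name, placement, and expiration handling; the project's actual implementation may differ:

```python
# Hypothetical sketch, not the project's actual class.
from django.utils import timezone as djangotime
from rest_framework.authentication import BaseAuthentication
from rest_framework.exceptions import AuthenticationFailed

from accounts.models import APIKey


class APIKeyAuthentication(BaseAuthentication):
    def authenticate(self, request):
        key = request.META.get("HTTP_X_API_KEY")
        if not key:
            return None  # fall through to the other authentication classes

        try:
            apikey = APIKey.objects.select_related("user").get(key=key)
        except APIKey.DoesNotExist:
            raise AuthenticationFailed("bad credentials")

        # an expired key should behave like a missing one
        if apikey.expiration and apikey.expiration < djangotime.now():
            raise AuthenticationFailed("bad credentials")

        return (apikey.user, None)
```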
@@ -12,4 +12,6 @@ urlpatterns = [
path("permslist/", views.PermsList.as_view()),
path("roles/", views.GetAddRoles.as_view()),
path("<int:pk>/role/", views.GetUpdateDeleteRole.as_view()),
path("apikeys/", views.GetAddAPIKeys.as_view()),
path("apikeys/<int:pk>/", views.GetUpdateDeleteAPIKey.as_view()),
]
@@ -3,23 +3,24 @@ from django.conf import settings
from django.contrib.auth import login
from django.db import IntegrityError
from django.shortcuts import get_object_or_404
from ipware import get_client_ip
from knox.views import LoginView as KnoxLoginView
from logs.models import AuditLog
from rest_framework import status
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

from logs.models import AuditLog
from tacticalrmm.utils import notify_error

from .models import User, Role
from .permissions import AccountsPerms, RolesPerms
from .models import APIKey, Role, User
from .permissions import APIKeyPerms, AccountsPerms, RolesPerms
from .serializers import (
APIKeySerializer,
RoleSerializer,
TOTPSetupSerializer,
UserSerializer,
UserUISerializer,
RoleSerializer,
)


@@ -40,11 +41,16 @@ class CheckCreds(KnoxLoginView):
# check credentials
serializer = AuthTokenSerializer(data=request.data)
if not serializer.is_valid():
AuditLog.audit_user_failed_login(request.data["username"])
AuditLog.audit_user_failed_login(
request.data["username"], debug_info={"ip": request._client_ip}
)
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)

user = serializer.validated_data["user"]

if user.block_dashboard_login:
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)

# if totp token not set modify response to notify frontend
if not user.totp_key:
login(request, user)

@@ -66,6 +72,9 @@ class LoginView(KnoxLoginView):
serializer.is_valid(raise_exception=True)
user = serializer.validated_data["user"]

if user.block_dashboard_login:
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)

token = request.data["twofactor"]
totp = pyotp.TOTP(user.totp_key)

@@ -76,10 +85,20 @@ class LoginView(KnoxLoginView):

if valid:
login(request, user)
AuditLog.audit_user_login_successful(request.data["username"])

# save ip information
client_ip, is_routable = get_client_ip(request)
user.last_login_ip = client_ip
user.save()

AuditLog.audit_user_login_successful(
request.data["username"], debug_info={"ip": request._client_ip}
)
return super(LoginView, self).post(request, format=None)
else:
AuditLog.audit_user_failed_twofactor(request.data["username"])
AuditLog.audit_user_failed_twofactor(
request.data["username"], debug_info={"ip": request._client_ip}
)
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)


@@ -87,7 +106,14 @@ class GetAddUsers(APIView):
permission_classes = [IsAuthenticated, AccountsPerms]

def get(self, request):
users = User.objects.filter(agent=None, is_installer_user=False)
search = request.GET.get("search", None)

if search:
users = User.objects.filter(agent=None, is_installer_user=False).filter(
username__icontains=search
)
else:
users = User.objects.filter(agent=None, is_installer_user=False)

return Response(UserSerializer(users, many=True).data)

@@ -104,8 +130,10 @@ class GetAddUsers(APIView):
f"ERROR: User {request.data['username']} already exists!"
)

user.first_name = request.data["first_name"]
user.last_name = request.data["last_name"]
if "first_name" in request.data.keys():
user.first_name = request.data["first_name"]
if "last_name" in request.data.keys():
user.last_name = request.data["last_name"]
if "role" in request.data.keys() and isinstance(request.data["role"], int):
role = get_object_or_404(Role, pk=request.data["role"])
user.role = role

@@ -233,3 +261,48 @@ class GetUpdateDeleteRole(APIView):
role = get_object_or_404(Role, pk=pk)
role.delete()
return Response("ok")


class GetAddAPIKeys(APIView):
permission_classes = [IsAuthenticated, APIKeyPerms]

def get(self, request):
apikeys = APIKey.objects.all()
return Response(APIKeySerializer(apikeys, many=True).data)

def post(self, request):
# generate a random API Key
# https://stackoverflow.com/questions/2257441/random-string-generation-with-upper-case-letters-and-digits/23728630#23728630
import random
import string

request.data["key"] = "".join(
random.SystemRandom().choice(string.ascii_uppercase + string.digits)
for _ in range(32)
)

serializer = APIKeySerializer(data=request.data)
serializer.is_valid(raise_exception=True)
obj = serializer.save()
return Response("The API Key was added")


class GetUpdateDeleteAPIKey(APIView):
permission_classes = [IsAuthenticated, APIKeyPerms]

def put(self, request, pk):
apikey = get_object_or_404(APIKey, pk=pk)

# remove API key is present in request data
if "key" in request.data.keys():
request.data.pop("key")

serializer = APIKeySerializer(instance=apikey, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response("The API Key was edited")

def delete(self, request, pk):
apikey = get_object_or_404(APIKey, pk=pk)
apikey.delete()
return Response("The API Key was deleted")
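The new `/accounts/apikeys/` views generate a 32-character key server side, and the tests earlier in this compare exercise it through an `X-API-KEY` request header. A small usage sketch from an external script; the base URL is a placeholder and the endpoint path is taken from the tests:

```python
import requests

API_BASE = "https://api.example.com"  # placeholder; use your own RMM API host
API_KEY = "REPLACE_WITH_GENERATED_KEY"  # the value created via /accounts/apikeys/

# any authenticated endpoint works, e.g. the clients list used in the tests
resp = requests.get(
    f"{API_BASE}/clients/clients/",
    headers={"X-API-KEY": API_KEY},
    timeout=30,
)
resp.raise_for_status()
print(resp.json())
```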
@@ -1,8 +1,9 @@
from django.contrib import admin

from .models import Agent, AgentCustomField, Note, RecoveryAction
from .models import Agent, AgentCustomField, Note, RecoveryAction, AgentHistory

admin.site.register(Agent)
admin.site.register(RecoveryAction)
admin.site.register(Note)
admin.site.register(AgentCustomField)
admin.site.register(AgentHistory)
api/tacticalrmm/agents/migrations/0038_agenthistory.py (new file, 27 lines)
@@ -0,0 +1,27 @@
# Generated by Django 3.2.1 on 2021-07-06 02:01

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

dependencies = [
('agents', '0037_auto_20210627_0014'),
]

operations = [
migrations.CreateModel(
name='AgentHistory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('time', models.DateTimeField(auto_now_add=True)),
('type', models.CharField(choices=[('task_run', 'Task Run'), ('script_run', 'Script Run'), ('cmd_run', 'CMD Run')], default='cmd_run', max_length=50)),
('command', models.TextField(blank=True, null=True)),
('status', models.CharField(choices=[('success', 'Success'), ('failure', 'Failure')], default='success', max_length=50)),
('username', models.CharField(default='system', max_length=50)),
('results', models.TextField(blank=True, null=True)),
('agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='history', to='agents.agent')),
],
),
]
api/tacticalrmm/agents/migrations/0039_auto_20210714_0738.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# Generated by Django 3.2.5 on 2021-07-14 07:38

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

dependencies = [
('scripts', '0008_script_guid'),
('agents', '0038_agenthistory'),
]

operations = [
migrations.AddField(
model_name='agenthistory',
name='script',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='history', to='scripts.script'),
),
migrations.AddField(
model_name='agenthistory',
name='script_results',
field=models.JSONField(blank=True, null=True),
),
]
@@ -16,14 +16,12 @@ from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils import timezone as djangotime
from loguru import logger
from nats.aio.client import Client as NATS
from nats.aio.errors import ErrTimeout
from packaging import version as pyver

from core.models import TZ_CHOICES, CoreSettings
from logs.models import BaseAuditModel

logger.configure(**settings.LOG_CONFIG)
from logs.models import BaseAuditModel, DebugLog


class Agent(BaseAuditModel):

@@ -89,10 +87,11 @@ class Agent(BaseAuditModel):
)

def save(self, *args, **kwargs):
from automation.tasks import generate_agent_checks_task

# get old agent if exists
old_agent = type(self).objects.get(pk=self.pk) if self.pk else None
super(BaseAuditModel, self).save(*args, **kwargs)
old_agent = Agent.objects.get(pk=self.pk) if self.pk else None
super(Agent, self).save(old_model=old_agent, *args, **kwargs)

# check if new agent has been created
# or check if policy have changed on agent

@@ -105,8 +104,11 @@ class Agent(BaseAuditModel):
or (old_agent.monitoring_type != self.monitoring_type)
or (old_agent.block_policy_inheritance != self.block_policy_inheritance)
):
self.generate_checks_from_policies()
self.generate_tasks_from_policies()
generate_agent_checks_task.delay(agents=[self.pk], create_tasks=True)

# calculate alert template for new agents
if not old_agent:
self.set_alert_template()

def __str__(self):
return self.hostname

@@ -123,7 +125,7 @@ class Agent(BaseAuditModel):
else:
from core.models import CoreSettings

return CoreSettings.objects.first().default_time_zone
return CoreSettings.objects.first().default_time_zone  # type: ignore

@property
def arch(self):

@@ -325,6 +327,7 @@ class Agent(BaseAuditModel):
full: bool = False,
wait: bool = False,
run_on_any: bool = False,
history_pk: int = 0,
) -> Any:

from scripts.models import Script

@@ -343,6 +346,9 @@ class Agent(BaseAuditModel):
},
}

if history_pk != 0 and pyver.parse(self.version) >= pyver.parse("1.6.0"):
data["id"] = history_pk

running_agent = self
if run_on_any:
nats_ping = {"func": "ping"}

@@ -411,6 +417,13 @@ class Agent(BaseAuditModel):
update.action = "approve"
update.save(update_fields=["action"])

if updates:
DebugLog.info(
agent=self,
log_type="windows_updates",
message=f"Approving windows updates on {self.hostname}",
)

# returns agent policy merged with a client or site specific policy
def get_patch_policy(self):

@@ -445,8 +458,8 @@ class Agent(BaseAuditModel):

# if patch policy still doesn't exist check default policy
elif (
core_settings.server_policy
and core_settings.server_policy.winupdatepolicy.exists()
core_settings.server_policy  # type: ignore
and core_settings.server_policy.winupdatepolicy.exists()  # type: ignore
):
# make sure agent site and client are not blocking inheritance
if (

@@ -454,7 +467,7 @@ class Agent(BaseAuditModel):
and not site.block_policy_inheritance
and not site.client.block_policy_inheritance
):
patch_policy = core_settings.server_policy.winupdatepolicy.get()
patch_policy = core_settings.server_policy.winupdatepolicy.get()  # type: ignore

elif self.monitoring_type == "workstation":
# check agent policy first which should override client or site policy

@@ -483,8 +496,8 @@ class Agent(BaseAuditModel):

# if patch policy still doesn't exist check default policy
elif (
core_settings.workstation_policy
and core_settings.workstation_policy.winupdatepolicy.exists()
core_settings.workstation_policy  # type: ignore
and core_settings.workstation_policy.winupdatepolicy.exists()  # type: ignore
):
# make sure agent site and client are not blocking inheritance
if (

@@ -493,7 +506,7 @@ class Agent(BaseAuditModel):
and not site.client.block_policy_inheritance
):
patch_policy = (
core_settings.workstation_policy.winupdatepolicy.get()
core_settings.workstation_policy.winupdatepolicy.get()  # type: ignore
)

# if policy still doesn't exist return the agent patch policy

@@ -608,35 +621,35 @@ class Agent(BaseAuditModel):

# check if alert template is applied globally and return
if (
core.alert_template
and core.alert_template.is_active
core.alert_template  # type: ignore
and core.alert_template.is_active  # type: ignore
and not self.block_policy_inheritance
and not site.block_policy_inheritance
and not client.block_policy_inheritance
):
templates.append(core.alert_template)
templates.append(core.alert_template)  # type: ignore

# if agent is a workstation, check if policy with alert template is assigned to the site, client, or core
if (
self.monitoring_type == "server"
and core.server_policy
and core.server_policy.alert_template
and core.server_policy.alert_template.is_active
and core.server_policy  # type: ignore
and core.server_policy.alert_template  # type: ignore
and core.server_policy.alert_template.is_active  # type: ignore
and not self.block_policy_inheritance
and not site.block_policy_inheritance
and not client.block_policy_inheritance
):
templates.append(core.server_policy.alert_template)
templates.append(core.server_policy.alert_template)  # type: ignore
if (
self.monitoring_type == "workstation"
and core.workstation_policy
and core.workstation_policy.alert_template
and core.workstation_policy.alert_template.is_active
and core.workstation_policy  # type: ignore
and core.workstation_policy.alert_template  # type: ignore
and core.workstation_policy.alert_template.is_active  # type: ignore
and not self.block_policy_inheritance
and not site.block_policy_inheritance
and not client.block_policy_inheritance
):
templates.append(core.workstation_policy.alert_template)
templates.append(core.workstation_policy.alert_template)  # type: ignore

# go through the templates and return the first one that isn't excluded
for template in templates:

@@ -739,7 +752,7 @@ class Agent(BaseAuditModel):
try:
ret = msgpack.loads(msg.data)  # type: ignore
except Exception as e:
logger.error(e)
DebugLog.error(agent=self, log_type="agent_issues", message=e)
ret = str(e)

await nc.close()

@@ -752,12 +765,9 @@ class Agent(BaseAuditModel):
@staticmethod
def serialize(agent):
# serializes the agent and returns json
from .serializers import AgentEditSerializer
from .serializers import AgentAuditSerializer

ret = AgentEditSerializer(agent).data
del ret["all_timezones"]
del ret["client"]
return ret
return AgentAuditSerializer(agent).data

def delete_superseded_updates(self):
try:

@@ -772,7 +782,7 @@ class Agent(BaseAuditModel):
# skip if no version info is available therefore nothing to parse
try:
vers = [
re.search(r"\(Version(.*?)\)", i).group(1).strip()
re.search(r"\(Version(.*?)\)", i).group(1).strip()  # type: ignore
for i in titles
]
sorted_vers = sorted(vers, key=LooseVersion)

@@ -807,7 +817,7 @@ class Agent(BaseAuditModel):
from core.models import CoreSettings

CORE = CoreSettings.objects.first()
CORE.send_mail(
CORE.send_mail(  # type: ignore
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
(
f"Data has not been received from client {self.client.name}, "

@@ -822,7 +832,7 @@ class Agent(BaseAuditModel):
from core.models import CoreSettings

CORE = CoreSettings.objects.first()
CORE.send_mail(
CORE.send_mail(  # type: ignore
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
(
f"Data has been received from client {self.client.name}, "

@@ -837,7 +847,7 @@ class Agent(BaseAuditModel):
from core.models import CoreSettings

CORE = CoreSettings.objects.first()
CORE.send_sms(
CORE.send_sms(  # type: ignore
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
alert_template=self.alert_template,
)

@@ -846,7 +856,7 @@ class Agent(BaseAuditModel):
from core.models import CoreSettings

CORE = CoreSettings.objects.first()
CORE.send_sms(
CORE.send_sms(  # type: ignore
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
alert_template=self.alert_template,
)

@@ -928,3 +938,57 @@ class AgentCustomField(models.Model):
return self.bool_value
else:
return self.string_value

def save_to_field(self, value):
if self.field.type in [
"text",
"number",
"single",
"datetime",
]:
self.string_value = value
self.save()
elif self.field.type == "multiple":
self.multiple_value = value.split(",")
self.save()
elif self.field.type == "checkbox":
self.bool_value = bool(value)
self.save()


AGENT_HISTORY_TYPES = (
("task_run", "Task Run"),
("script_run", "Script Run"),
("cmd_run", "CMD Run"),
)

AGENT_HISTORY_STATUS = (("success", "Success"), ("failure", "Failure"))


class AgentHistory(models.Model):
agent = models.ForeignKey(
Agent,
related_name="history",
on_delete=models.CASCADE,
)
time = models.DateTimeField(auto_now_add=True)
type = models.CharField(
max_length=50, choices=AGENT_HISTORY_TYPES, default="cmd_run"
)
command = models.TextField(null=True, blank=True)
status = models.CharField(
max_length=50, choices=AGENT_HISTORY_STATUS, default="success"
)
username = models.CharField(max_length=50, default="system")
results = models.TextField(null=True, blank=True)
script = models.ForeignKey(
"scripts.Script",
null=True,
blank=True,
related_name="history",
on_delete=models.SET_NULL,
)
script_results = models.JSONField(null=True, blank=True)

def __str__(self):
return f"{self.agent.hostname} - {self.type}"
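`Agent.run_script` now accepts a `history_pk` and forwards it to agents at version 1.6.0 or newer, while the new `AgentHistory` model stores the run's command, status, and results. A short sketch of how a caller might tie the two together; the helper name and call site here are assumptions for illustration:

```python
# Illustrative only; the helper name is made up and not part of this compare.
from agents.models import Agent, AgentHistory
from scripts.models import Script


def run_script_with_history(agent: Agent, script: Script, args: list[str], timeout: int):
    # record the run first so results can be reported against this row
    hist = AgentHistory.objects.create(
        agent=agent,
        type="script_run",
        script=script,
        username="system",
    )
    return agent.run_script(
        scriptpk=script.pk,
        args=args,
        timeout=timeout,
        wait=True,
        history_pk=hist.pk,  # ignored by agents older than 1.6.0
    )
```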
@@ -1,10 +1,10 @@
import pytz
from rest_framework import serializers

from clients.serializers import ClientSerializer
from rest_framework import serializers
from tacticalrmm.utils import get_default_timezone
from winupdate.serializers import WinUpdatePolicySerializer

from .models import Agent, AgentCustomField, Note
from .models import Agent, AgentCustomField, Note, AgentHistory


class AgentSerializer(serializers.ModelSerializer):

@@ -159,6 +159,7 @@ class AgentEditSerializer(serializers.ModelSerializer):
"offline_time",
"overdue_text_alert",
"overdue_email_alert",
"overdue_dashboard_alert",
"all_timezones",
"winupdatepolicy",
"policy",

@@ -200,3 +201,22 @@ class NotesSerializer(serializers.ModelSerializer):
class Meta:
model = Agent
fields = ["hostname", "pk", "notes"]


class AgentHistorySerializer(serializers.ModelSerializer):
time = serializers.SerializerMethodField(read_only=True)
script_name = serializers.ReadOnlyField(source="script.name")

class Meta:
model = AgentHistory
fields = "__all__"

def get_time(self, history):
tz = self.context["default_tz"]
return history.time.astimezone(tz).strftime("%m %d %Y %H:%M:%S")


class AgentAuditSerializer(serializers.ModelSerializer):
class Meta:
model = Agent
exclude = ["disks", "services", "wmi_detail"]
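`AgentHistorySerializer` renders `time` through a `SerializerMethodField` that expects a `default_tz` entry in the serializer context. A short usage sketch; the hostname and timezone values are examples only:

```python
# Example usage; the hostname filter and timezone choice are illustrative.
import pytz

from agents.models import AgentHistory
from agents.serializers import AgentHistorySerializer

history = AgentHistory.objects.filter(agent__hostname="DESKTOP-01")
data = AgentHistorySerializer(
    history,
    many=True,
    context={"default_tz": pytz.timezone("America/New_York")},
).data
```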
@@ -1,26 +1,21 @@
import asyncio
import datetime as dt
import random
import tempfile
import json
import subprocess
import urllib.parse
from time import sleep
from typing import Union

from alerts.models import Alert
from core.models import CodeSignToken, CoreSettings
from django.conf import settings
from django.utils import timezone as djangotime
from loguru import logger
from logs.models import DebugLog, PendingAction
from packaging import version as pyver

from agents.models import Agent
from core.models import CodeSignToken, CoreSettings
from logs.models import PendingAction
from scripts.models import Script
from tacticalrmm.celery import app
from tacticalrmm.utils import run_nats_api_cmd

logger.configure(**settings.LOG_CONFIG)
from agents.models import Agent


def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str:

@@ -33,8 +28,10 @@ def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str

# skip if we can't determine the arch
if agent.arch is None:
logger.warning(
f"Unable to determine arch on {agent.hostname}. Skipping agent update."
DebugLog.warning(
agent=agent,
log_type="agent_issues",
message=f"Unable to determine arch on {agent.hostname}({agent.pk}). Skipping agent update.",
)
return "noarch"

@@ -81,7 +78,7 @@ def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str
@app.task
def force_code_sign(pks: list[int]) -> None:
try:
token = CodeSignToken.objects.first().token
token = CodeSignToken.objects.first().token  # type:ignore
except:
return

@@ -96,7 +93,7 @@ def force_code_sign(pks: list[int]) -> None:
@app.task
def send_agent_update_task(pks: list[int]) -> None:
try:
codesigntoken = CodeSignToken.objects.first().token
codesigntoken = CodeSignToken.objects.first().token  # type:ignore
except:
codesigntoken = None

@@ -111,11 +108,11 @@ def send_agent_update_task(pks: list[int]) -> None:
@app.task
def auto_self_agent_update_task() -> None:
core = CoreSettings.objects.first()
if not core.agent_auto_update:
if not core.agent_auto_update:  # type:ignore
return

try:
codesigntoken = CodeSignToken.objects.first().token
codesigntoken = CodeSignToken.objects.first().token  # type:ignore
except:
codesigntoken = None

@@ -235,14 +232,24 @@ def run_script_email_results_task(
nats_timeout: int,
emails: list[str],
args: list[str] = [],
history_pk: int = 0,
):
agent = Agent.objects.get(pk=agentpk)
script = Script.objects.get(pk=scriptpk)
r = agent.run_script(
scriptpk=script.pk, args=args, full=True, timeout=nats_timeout, wait=True
scriptpk=script.pk,
args=args,
full=True,
timeout=nats_timeout,
wait=True,
history_pk=history_pk,
)
if r == "timeout":
logger.error(f"{agent.hostname} timed out running script.")
DebugLog.error(
agent=agent,
log_type="scripting",
message=f"{agent.hostname}({agent.pk}) timed out running script.",
)
return

CORE = CoreSettings.objects.first()

@@ -258,28 +265,32 @@ def run_script_email_results_task(

msg = EmailMessage()
msg["Subject"] = subject
msg["From"] = CORE.smtp_from_email
msg["From"] = CORE.smtp_from_email  # type:ignore

if emails:
msg["To"] = ", ".join(emails)
else:
msg["To"] = ", ".join(CORE.email_alert_recipients)
msg["To"] = ", ".join(CORE.email_alert_recipients)  # type:ignore

msg.set_content(body)

try:
with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
if CORE.smtp_requires_auth:
with smtplib.SMTP(
CORE.smtp_host, CORE.smtp_port, timeout=20  # type:ignore
) as server:  # type:ignore
if CORE.smtp_requires_auth:  # type:ignore
server.ehlo()
server.starttls()
server.login(CORE.smtp_host_user, CORE.smtp_host_password)
server.login(
CORE.smtp_host_user, CORE.smtp_host_password  # type:ignore
)  # type:ignore
server.send_message(msg)
server.quit()
else:
server.send_message(msg)
server.quit()
except Exception as e:
logger.error(e)
DebugLog.error(message=e)


@app.task

@@ -310,15 +321,6 @@ def clear_faults_task(older_than_days: int) -> None:
)


@app.task
def monitor_agents_task() -> None:
agents = Agent.objects.only(
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
)
ids = [i.agent_id for i in agents if i.status != "online"]
run_nats_api_cmd("monitor", ids)


@app.task
def get_wmi_task() -> None:
agents = Agent.objects.only(

@@ -330,18 +332,62 @@ def get_wmi_task() -> None:

@app.task
def agent_checkin_task() -> None:
db = settings.DATABASES["default"]
config = {
"key": settings.SECRET_KEY,
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
"user": db["USER"],
"pass": db["PASSWORD"],
"host": db["HOST"],
"port": int(db["PORT"]),
"dbname": db["NAME"],
}
with tempfile.NamedTemporaryFile() as fp:
with open(fp.name, "w") as f:
json.dump(config, f)
cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", "checkin"]
subprocess.run(cmd, timeout=30)
run_nats_api_cmd("checkin", timeout=30)


@app.task
def agent_getinfo_task() -> None:
run_nats_api_cmd("agentinfo", timeout=30)


@app.task
def prune_agent_history(older_than_days: int) -> str:
from .models import AgentHistory

AgentHistory.objects.filter(
time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
).delete()

return "ok"


@app.task
def handle_agents_task() -> None:
q = Agent.objects.prefetch_related("pendingactions", "autotasks").only(
"pk", "agent_id", "version", "last_seen", "overdue_time", "offline_time"
)
agents = [
i
for i in q
if pyver.parse(i.version) >= pyver.parse("1.6.0") and i.status == "online"
]
for agent in agents:
# change agent update pending status to completed if agent has just updated
if (
pyver.parse(agent.version) == pyver.parse(settings.LATEST_AGENT_VER)
and agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).exists()
):
agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).update(status="completed")

# sync scheduled tasks
if agent.autotasks.exclude(sync_status="synced").exists():  # type: ignore
tasks = agent.autotasks.exclude(sync_status="synced")  # type: ignore

for task in tasks:
if task.sync_status == "pendingdeletion":
task.delete_task_on_agent()
elif task.sync_status == "initial":
task.modify_task_on_agent()
elif task.sync_status == "notsynced":
task.create_task_on_agent()

# handles any alerting actions
if Alert.objects.filter(agent=agent, resolved=False).exists():
try:
Alert.handle_alert_resolve(agent)
except:
continue
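Several tasks above (`monitor_agents_task`, `agent_checkin_task`, `agent_getinfo_task`) now delegate to `run_nats_api_cmd` from `tacticalrmm.utils` instead of building the nats-api invocation inline. The helper itself is not shown in this compare; judging from the inline code it replaces, it presumably does something like the sketch below (the signature and the handling of `ids` are assumptions):

```python
# Sketch reconstructed from the removed inline code in agent_checkin_task;
# not the actual tacticalrmm.utils implementation.
import json
import subprocess
import tempfile

from django.conf import settings


def run_nats_api_cmd(mode: str, ids: list[str] = None, timeout: int = 30) -> None:
    db = settings.DATABASES["default"]
    config = {
        "key": settings.SECRET_KEY,
        "natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
        "user": db["USER"],
        "pass": db["PASSWORD"],
        "host": db["HOST"],
        "port": int(db["PORT"]),
        "dbname": db["NAME"],
    }
    if ids:
        config["agents"] = ids  # assumption: how the "monitor" mode receives its agent ids

    # write the config to a temp file and hand it to the nats-api binary
    with tempfile.NamedTemporaryFile() as fp:
        with open(fp.name, "w") as f:
            json.dump(config, f)
        cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", mode]
        subprocess.run(cmd, timeout=timeout)
```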
@@ -1,19 +1,19 @@
import json
import os
from itertools import cycle
import pytz
from django.utils import timezone as djangotime
from unittest.mock import patch

from django.conf import settings
from logs.models import PendingAction
from model_bakery import baker
from packaging import version as pyver

from logs.models import PendingAction
from tacticalrmm.test import TacticalTestCase
from winupdate.models import WinUpdatePolicy
from winupdate.serializers import WinUpdatePolicySerializer

from .models import Agent, AgentCustomField
from .serializers import AgentSerializer
from .models import Agent, AgentCustomField, AgentHistory
from .serializers import AgentHistorySerializer, AgentSerializer
from .tasks import auto_self_agent_update_task


@@ -306,7 +306,7 @@ class TestAgentViews(TacticalTestCase):
"shell": "cmd",
"timeout": 30,
}
mock_ret.return_value = "nt authority\system"
mock_ret.return_value = "nt authority\\system"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.assertIsInstance(r.data, str)  # type: ignore

@@ -437,7 +437,7 @@ class TestAgentViews(TacticalTestCase):
self.assertEqual(r.status_code, 200)
self.assertEqual(RecoveryAction.objects.count(), 1)
mesh_recovery = RecoveryAction.objects.first()
self.assertEqual(mesh_recovery.mode, "mesh")
self.assertEqual(mesh_recovery.mode, "mesh")  # type: ignore
nats_cmd.reset_mock()
RecoveryAction.objects.all().delete()

@@ -472,8 +472,8 @@ class TestAgentViews(TacticalTestCase):
self.assertEqual(r.status_code, 200)
self.assertEqual(RecoveryAction.objects.count(), 1)
cmd_recovery = RecoveryAction.objects.first()
self.assertEqual(cmd_recovery.mode, "command")
self.assertEqual(cmd_recovery.command, "shutdown /r /t 10 /f")
self.assertEqual(cmd_recovery.mode, "command")  # type: ignore
self.assertEqual(cmd_recovery.command, "shutdown /r /t 10 /f")  # type: ignore

def test_agents_agent_detail(self):
url = f"/agents/{self.agent.pk}/agentdetail/"

@@ -770,6 +770,9 @@ class TestAgentViews(TacticalTestCase):
@patch("agents.tasks.run_script_email_results_task.delay")
@patch("agents.models.Agent.run_script")
def test_run_script(self, run_script, email_task):
from .models import AgentCustomField, Note
from clients.models import ClientCustomField, SiteCustomField

run_script.return_value = "ok"
url = "/agents/runscript/"
script = baker.make_recipe("scripts.script")

@@ -777,7 +780,7 @@ class TestAgentViews(TacticalTestCase):
# test wait
data = {
"pk": self.agent.pk,
"scriptPK": script.pk,
"script": script.pk,
"output": "wait",
"args": [],
"timeout": 15,

@@ -786,18 +789,18 @@ class TestAgentViews(TacticalTestCase):
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk, args=[], timeout=18, wait=True
scriptpk=script.pk, args=[], timeout=18, wait=True, history_pk=0
)
run_script.reset_mock()

# test email default
data = {
"pk": self.agent.pk,
"scriptPK": script.pk,
"script": script.pk,
"output": "email",
"args": ["abc", "123"],
"timeout": 15,
"emailmode": "default",
"emailMode": "default",
"emails": ["admin@example.com", "bob@example.com"],
}
r = self.client.post(url, data, format="json")

@@ -812,7 +815,7 @@ class TestAgentViews(TacticalTestCase):
email_task.reset_mock()

# test email overrides
data["emailmode"] = "custom"
data["emailMode"] = "custom"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
email_task.assert_called_with(

@@ -826,7 +829,7 @@ class TestAgentViews(TacticalTestCase):
# test fire and forget
data = {
"pk": self.agent.pk,
"scriptPK": script.pk,
"script": script.pk,
"output": "forget",
"args": ["hello", "world"],
"timeout": 22,

@@ -835,8 +838,139 @@ class TestAgentViews(TacticalTestCase):
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk, args=["hello", "world"], timeout=25
scriptpk=script.pk, args=["hello", "world"], timeout=25, history_pk=0
)
run_script.reset_mock()

# test collector

# save to agent custom field
custom_field = baker.make("core.CustomField", model="agent")
data = {
"pk": self.agent.pk,
"script": script.pk,
"output": "collector",
"args": ["hello", "world"],
"timeout": 22,
"custom_field": custom_field.id,  # type: ignore
"save_all_output": True,
}

r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
timeout=25,
wait=True,
history_pk=0,
)
run_script.reset_mock()

self.assertEqual(
AgentCustomField.objects.get(agent=self.agent.pk, field=custom_field).value,
"ok",
)

# save to site custom field
custom_field = baker.make("core.CustomField", model="site")
data = {
"pk": self.agent.pk,
"script": script.pk,
"output": "collector",
"args": ["hello", "world"],
"timeout": 22,
"custom_field": custom_field.id,  # type: ignore
"save_all_output": False,
}

r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
timeout=25,
wait=True,
history_pk=0,
)
run_script.reset_mock()

self.assertEqual(
SiteCustomField.objects.get(
site=self.agent.site.pk, field=custom_field
).value,
"ok",
)

# save to client custom field
custom_field = baker.make("core.CustomField", model="client")
data = {
"pk": self.agent.pk,
"script": script.pk,
"output": "collector",
"args": ["hello", "world"],
"timeout": 22,
"custom_field": custom_field.id,  # type: ignore
"save_all_output": False,
}

r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
timeout=25,
wait=True,
history_pk=0,
)
run_script.reset_mock()

self.assertEqual(
ClientCustomField.objects.get(
client=self.agent.client.pk, field=custom_field
).value,
"ok",
)

# test save to note
data = {
"pk": self.agent.pk,
|
||||
"script": script.pk,
|
||||
"output": "note",
|
||||
"args": ["hello", "world"],
|
||||
"timeout": 22,
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
run_script.assert_called_with(
|
||||
scriptpk=script.pk,
|
||||
args=["hello", "world"],
|
||||
timeout=25,
|
||||
wait=True,
|
||||
history_pk=0,
|
||||
)
|
||||
run_script.reset_mock()
|
||||
|
||||
self.assertEqual(Note.objects.get(agent=self.agent).note, "ok")
|
||||
|
||||
def test_get_agent_history(self):
|
||||
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
history = baker.make("agents.AgentHistory", agent=agent, _quantity=30)
|
||||
url = f"/agents/history/{agent.id}/"
|
||||
|
||||
# test agent not found
|
||||
r = self.client.get("/agents/history/500/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
# test pulling data
|
||||
r = self.client.get(url, format="json")
|
||||
ctx = {"default_tz": pytz.timezone("America/Los_Angeles")}
|
||||
data = AgentHistorySerializer(history, many=True, context=ctx).data
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, data) # type:ignore
|
||||
|
||||
|
||||
class TestAgentViewsNew(TacticalTestCase):
|
||||
@@ -1048,3 +1182,25 @@ class TestAgentTasks(TacticalTestCase):
|
||||
|
||||
r = auto_self_agent_update_task.s().apply()
|
||||
self.assertEqual(agent_update.call_count, 33)
def test_agent_history_prune_task(self):
from .tasks import prune_agent_history

# setup data
agent = baker.make_recipe("agents.agent")
history = baker.make(
"agents.AgentHistory",
agent=agent,
_quantity=50,
)

days = 0
for item in history: # type: ignore
item.time = djangotime.now() - djangotime.timedelta(days=days)
item.save()
days = days + 5

# delete AgentHistory older than 30 days
prune_agent_history(30)

self.assertEqual(AgentHistory.objects.filter(agent=agent).count(), 6)
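The `prune_agent_history` task itself is not included in this compare view; the following is only a sketch of what it is assumed to look like, inferred from the test above and the `prune_resolved_alerts` task shown later in this diff:

# assumption: a Celery task mirroring prune_resolved_alerts; not part of this diff
from django.utils import timezone as djangotime

from tacticalrmm.celery import app


@app.task
def prune_agent_history(older_than_days: int) -> str:
    from agents.models import AgentHistory

    # drop history rows whose "time" field is older than the cutoff
    AgentHistory.objects.filter(
        time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
    ).delete()

    return "ok"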
@@ -29,4 +29,5 @@ urlpatterns = [
path("bulk/", views.bulk),
path("maintenance/", views.agent_maintenance),
path("<int:pk>/wmi/", views.WMI.as_view()),
path("history/<int:pk>/", views.AgentHistoryView.as_view()),
]
|
||||
|
||||
@@ -8,7 +8,6 @@ import time
|
||||
from django.conf import settings
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
@@ -17,14 +16,14 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from core.models import CoreSettings
|
||||
from logs.models import AuditLog, PendingAction
|
||||
from logs.models import AuditLog, DebugLog, PendingAction
|
||||
from scripts.models import Script
|
||||
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
|
||||
from tacticalrmm.utils import get_default_timezone, notify_error, reload_nats
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
|
||||
|
||||
from .models import Agent, AgentCustomField, Note, RecoveryAction
|
||||
from .models import Agent, AgentCustomField, Note, RecoveryAction, AgentHistory
|
||||
from .permissions import (
|
||||
EditAgentPerms,
|
||||
EvtLogPerms,
|
||||
@@ -42,6 +41,7 @@ from .permissions import (
|
||||
from .serializers import (
|
||||
AgentCustomFieldSerializer,
|
||||
AgentEditSerializer,
|
||||
AgentHistorySerializer,
|
||||
AgentHostnameSerializer,
|
||||
AgentOverdueActionSerializer,
|
||||
AgentSerializer,
|
||||
@@ -51,8 +51,6 @@ from .serializers import (
|
||||
)
|
||||
from .tasks import run_script_email_results_task, send_agent_update_task
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@api_view()
|
||||
def get_agent_versions(request):
|
||||
@@ -115,7 +113,7 @@ def uninstall(request):
|
||||
def edit_agent(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["id"])
|
||||
|
||||
a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
a_serializer = AgentEditSerializer(instance=agent, data=request.data, partial=True)
|
||||
a_serializer.is_valid(raise_exception=True)
|
||||
a_serializer.save()
|
||||
|
||||
@@ -160,17 +158,21 @@ def meshcentral(request, pk):
|
||||
core = CoreSettings.objects.first()
|
||||
|
||||
token = agent.get_login_token(
|
||||
key=core.mesh_token, user=f"user//{core.mesh_username}"
|
||||
key=core.mesh_token, user=f"user//{core.mesh_username}" # type:ignore
|
||||
)
|
||||
|
||||
if token == "err":
|
||||
return notify_error("Invalid mesh token")
|
||||
|
||||
control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
|
||||
terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
|
||||
file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31"
|
||||
control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31" # type:ignore
|
||||
terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31" # type:ignore
|
||||
file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31" # type:ignore
|
||||
|
||||
AuditLog.audit_mesh_session(username=request.user.username, hostname=agent.hostname)
|
||||
AuditLog.audit_mesh_session(
|
||||
username=request.user.username,
|
||||
agent=agent,
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
ret = {
|
||||
"hostname": agent.hostname,
|
||||
@@ -248,6 +250,16 @@ def send_raw_cmd(request):
|
||||
"shell": request.data["shell"],
|
||||
},
|
||||
}
|
||||
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type="cmd_run",
|
||||
command=request.data["cmd"],
|
||||
username=request.user.username[:50],
|
||||
)
|
||||
data["id"] = hist.pk
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
|
||||
|
||||
if r == "timeout":
|
||||
@@ -255,9 +267,10 @@ def send_raw_cmd(request):
|
||||
|
||||
AuditLog.audit_raw_command(
|
||||
username=request.user.username,
|
||||
hostname=agent.hostname,
|
||||
agent=agent,
|
||||
cmd=request.data["cmd"],
|
||||
shell=request.data["shell"],
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
return Response(r)
|
||||
@@ -508,7 +521,7 @@ def install_agent(request):
|
||||
try:
|
||||
os.remove(ps1)
|
||||
except Exception as e:
|
||||
logger.error(str(e))
|
||||
DebugLog.error(message=str(e))
|
||||
|
||||
with open(ps1, "w") as f:
|
||||
f.write(text)
|
||||
@@ -566,26 +579,41 @@ def recover(request):
|
||||
@permission_classes([IsAuthenticated, RunScriptPerms])
|
||||
def run_script(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
script = get_object_or_404(Script, pk=request.data["scriptPK"])
|
||||
script = get_object_or_404(Script, pk=request.data["script"])
|
||||
output = request.data["output"]
|
||||
args = request.data["args"]
|
||||
req_timeout = int(request.data["timeout"]) + 3
|
||||
|
||||
AuditLog.audit_script_run(
|
||||
username=request.user.username,
|
||||
hostname=agent.hostname,
|
||||
agent=agent,
|
||||
script=script.name,
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
history_pk = 0
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type="script_run",
|
||||
script=script,
|
||||
username=request.user.username[:50],
|
||||
)
|
||||
history_pk = hist.pk
|
||||
|
||||
if output == "wait":
|
||||
r = agent.run_script(
|
||||
scriptpk=script.pk, args=args, timeout=req_timeout, wait=True
|
||||
scriptpk=script.pk,
|
||||
args=args,
|
||||
timeout=req_timeout,
|
||||
wait=True,
|
||||
history_pk=history_pk,
|
||||
)
|
||||
return Response(r)
|
||||
|
||||
elif output == "email":
|
||||
emails = (
|
||||
[] if request.data["emailmode"] == "default" else request.data["emails"]
|
||||
[] if request.data["emailMode"] == "default" else request.data["emails"]
|
||||
)
|
||||
run_script_email_results_task.delay(
|
||||
agentpk=agent.pk,
|
||||
@@ -594,8 +622,51 @@ def run_script(request):
|
||||
emails=emails,
|
||||
args=args,
|
||||
)
|
||||
elif output == "collector":
|
||||
from core.models import CustomField
|
||||
|
||||
r = agent.run_script(
|
||||
scriptpk=script.pk,
|
||||
args=args,
|
||||
timeout=req_timeout,
|
||||
wait=True,
|
||||
history_pk=history_pk,
|
||||
)
|
||||
|
||||
custom_field = CustomField.objects.get(pk=request.data["custom_field"])
|
||||
|
||||
if custom_field.model == "agent":
|
||||
field = custom_field.get_or_create_field_value(agent)
|
||||
elif custom_field.model == "client":
|
||||
field = custom_field.get_or_create_field_value(agent.client)
|
||||
elif custom_field.model == "site":
|
||||
field = custom_field.get_or_create_field_value(agent.site)
|
||||
else:
|
||||
return notify_error("Custom Field was invalid")
|
||||
|
||||
value = (
|
||||
r.strip()
|
||||
if request.data["save_all_output"]
|
||||
else r.strip().split("\n")[-1].strip()
|
||||
)
|
||||
|
||||
field.save_to_field(value)
|
||||
return Response(r)
|
||||
elif output == "note":
|
||||
r = agent.run_script(
|
||||
scriptpk=script.pk,
|
||||
args=args,
|
||||
timeout=req_timeout,
|
||||
wait=True,
|
||||
history_pk=history_pk,
|
||||
)
|
||||
|
||||
Note.objects.create(agent=agent, user=request.user, note=r)
|
||||
return Response(r)
|
||||
else:
|
||||
agent.run_script(scriptpk=script.pk, args=args, timeout=req_timeout)
|
||||
agent.run_script(
|
||||
scriptpk=script.pk, args=args, timeout=req_timeout, history_pk=history_pk
|
||||
)
|
||||
|
||||
return Response(f"{script.name} will now be run on {agent.hostname}")
@@ -668,7 +739,7 @@ class GetEditDeleteNote(APIView):
|
||||
@api_view(["POST"])
|
||||
@permission_classes([IsAuthenticated, RunBulkPerms])
|
||||
def bulk(request):
|
||||
if request.data["target"] == "agents" and not request.data["agentPKs"]:
|
||||
if request.data["target"] == "agents" and not request.data["agents"]:
|
||||
return notify_error("Must select at least 1 agent")
|
||||
|
||||
if request.data["target"] == "client":
|
||||
@@ -676,7 +747,7 @@ def bulk(request):
|
||||
elif request.data["target"] == "site":
|
||||
q = Agent.objects.filter(site_id=request.data["site"])
|
||||
elif request.data["target"] == "agents":
|
||||
q = Agent.objects.filter(pk__in=request.data["agentPKs"])
|
||||
q = Agent.objects.filter(pk__in=request.data["agents"])
|
||||
elif request.data["target"] == "all":
|
||||
q = Agent.objects.only("pk", "monitoring_type")
|
||||
else:
|
||||
@@ -689,29 +760,48 @@ def bulk(request):
|
||||
|
||||
agents: list[int] = [agent.pk for agent in q]
|
||||
|
||||
AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
|
||||
if not agents:
|
||||
return notify_error("No agents where found meeting the selected criteria")
|
||||
|
||||
AuditLog.audit_bulk_action(
|
||||
request.user,
|
||||
request.data["mode"],
|
||||
request.data,
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
if request.data["mode"] == "command":
|
||||
handle_bulk_command_task.delay(
|
||||
agents, request.data["cmd"], request.data["shell"], request.data["timeout"]
|
||||
agents,
|
||||
request.data["cmd"],
|
||||
request.data["shell"],
|
||||
request.data["timeout"],
|
||||
request.user.username[:50],
|
||||
run_on_offline=request.data["offlineAgents"],
|
||||
)
|
||||
return Response(f"Command will now be run on {len(agents)} agents")
|
||||
|
||||
elif request.data["mode"] == "script":
|
||||
script = get_object_or_404(Script, pk=request.data["scriptPK"])
|
||||
script = get_object_or_404(Script, pk=request.data["script"])
|
||||
handle_bulk_script_task.delay(
|
||||
script.pk, agents, request.data["args"], request.data["timeout"]
|
||||
script.pk,
|
||||
agents,
|
||||
request.data["args"],
|
||||
request.data["timeout"],
|
||||
request.user.username[:50],
|
||||
)
|
||||
return Response(f"{script.name} will now be run on {len(agents)} agents")
|
||||
|
||||
elif request.data["mode"] == "install":
|
||||
bulk_install_updates_task.delay(agents)
|
||||
return Response(
|
||||
f"Pending updates will now be installed on {len(agents)} agents"
|
||||
)
|
||||
elif request.data["mode"] == "scan":
|
||||
bulk_check_for_updates_task.delay(agents)
|
||||
return Response(f"Patch status scan will now run on {len(agents)} agents")
|
||||
elif request.data["mode"] == "patch":
|
||||
|
||||
if request.data["patchMode"] == "install":
|
||||
bulk_install_updates_task.delay(agents)
|
||||
return Response(
|
||||
f"Pending updates will now be installed on {len(agents)} agents"
|
||||
)
|
||||
elif request.data["patchMode"] == "scan":
|
||||
bulk_check_for_updates_task.delay(agents)
|
||||
return Response(f"Patch status scan will now run on {len(agents)} agents")
|
||||
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
@@ -746,3 +836,11 @@ class WMI(APIView):
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class AgentHistoryView(APIView):
def get(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
history = AgentHistory.objects.filter(agent=agent)
ctx = {"default_tz": get_default_timezone()}
return Response(AgentHistorySerializer(history, many=True, context=ctx).data)
api/tacticalrmm/alerts/migrations/0007_auto_20210721_0423.py (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 04:23
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0006_auto_20210217_1736'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='created_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='created_time',
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='modified_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='modified_time',
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/alerts/migrations/0008_auto_20210721_1757.py (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 17:57
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0007_auto_20210721_0423'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_script_actions',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='check_script_actions',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='task_script_actions',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/alerts/migrations/0009_auto_20210721_1810.py (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 18:10
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0008_auto_20210721_1757'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_script_actions',
|
||||
field=models.BooleanField(blank=True, default=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='check_script_actions',
|
||||
field=models.BooleanField(blank=True, default=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='task_script_actions',
|
||||
field=models.BooleanField(blank=True, default=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -3,19 +3,18 @@ from __future__ import annotations
|
||||
import re
|
||||
from typing import TYPE_CHECKING, Union
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.db.models.fields import BooleanField, PositiveIntegerField
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from agents.models import Agent
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
SEVERITY_CHOICES = [
|
||||
("info", "Informational"),
|
||||
@@ -173,6 +172,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.agent_always_email
|
||||
always_text = alert_template.agent_always_text
|
||||
alert_interval = alert_template.agent_periodic_alert_days
|
||||
run_script_action = alert_template.agent_script_actions
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_availability_alert(instance)
|
||||
@@ -209,6 +209,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.check_always_email
|
||||
always_text = alert_template.check_always_text
|
||||
alert_interval = alert_template.check_periodic_alert_days
|
||||
run_script_action = alert_template.check_script_actions
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_check_alert(instance)
|
||||
@@ -242,6 +243,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.task_always_email
|
||||
always_text = alert_template.task_always_text
|
||||
alert_interval = alert_template.task_periodic_alert_days
|
||||
run_script_action = alert_template.task_script_actions
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_task_alert(instance)
|
||||
@@ -295,7 +297,7 @@ class Alert(models.Model):
|
||||
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
|
||||
|
||||
# check if any scripts should be run
|
||||
if alert_template and alert_template.action and not alert.action_run:
|
||||
if alert_template and alert_template.action and run_script_action and not alert.action_run: # type: ignore
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.action.pk,
|
||||
args=alert.parse_script_args(alert_template.action_args),
|
||||
@@ -314,8 +316,10 @@ class Alert(models.Model):
|
||||
alert.action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname} failure alert"
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="scripting",
|
||||
message=f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) failure alert",
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@@ -345,6 +349,7 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.agent_email_on_resolved
|
||||
text_on_resolved = alert_template.agent_text_on_resolved
|
||||
run_script_action = alert_template.agent_script_actions
|
||||
|
||||
elif isinstance(instance, Check):
|
||||
from checks.tasks import (
|
||||
@@ -363,6 +368,7 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.check_email_on_resolved
|
||||
text_on_resolved = alert_template.check_text_on_resolved
|
||||
run_script_action = alert_template.check_script_actions
|
||||
|
||||
elif isinstance(instance, AutomatedTask):
|
||||
from autotasks.tasks import (
|
||||
@@ -381,6 +387,7 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.task_email_on_resolved
|
||||
text_on_resolved = alert_template.task_text_on_resolved
|
||||
run_script_action = alert_template.task_script_actions
|
||||
|
||||
else:
|
||||
return
|
||||
@@ -403,6 +410,7 @@ class Alert(models.Model):
|
||||
if (
|
||||
alert_template
|
||||
and alert_template.resolved_action
|
||||
and run_script_action # type: ignore
|
||||
and not alert.resolved_action_run
|
||||
):
|
||||
r = agent.run_script(
|
||||
@@ -425,8 +433,10 @@ class Alert(models.Model):
|
||||
alert.resolved_action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname} resolved alert"
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="scripting",
|
||||
message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
|
||||
)
|
||||
|
||||
def parse_script_args(self, args: list[str]):
|
||||
@@ -451,7 +461,7 @@ class Alert(models.Model):
|
||||
try:
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg)) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
DebugLog.error(log_type="scripting", message=e)
|
||||
continue
|
||||
|
||||
else:
|
||||
@@ -460,7 +470,7 @@ class Alert(models.Model):
|
||||
return temp_args
|
||||
|
||||
|
||||
class AlertTemplate(models.Model):
|
||||
class AlertTemplate(BaseAuditModel):
|
||||
name = models.CharField(max_length=100)
|
||||
is_active = models.BooleanField(default=True)
|
||||
|
||||
@@ -517,6 +527,7 @@ class AlertTemplate(models.Model):
|
||||
agent_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
agent_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
agent_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
|
||||
# check alert settings
|
||||
check_email_alert_severity = ArrayField(
|
||||
@@ -540,6 +551,7 @@ class AlertTemplate(models.Model):
|
||||
check_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
check_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
check_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
|
||||
# task alert settings
|
||||
task_email_alert_severity = ArrayField(
|
||||
@@ -563,6 +575,7 @@ class AlertTemplate(models.Model):
|
||||
task_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
task_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
task_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
|
||||
# exclusion settings
|
||||
exclude_workstations = BooleanField(null=True, blank=True, default=False)
|
||||
@@ -581,6 +594,13 @@ class AlertTemplate(models.Model):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@staticmethod
|
||||
def serialize(alert_template):
|
||||
# serializes the agent and returns json
|
||||
from .serializers import AlertTemplateAuditSerializer
|
||||
|
||||
return AlertTemplateAuditSerializer(alert_template).data
|
||||
|
||||
@property
|
||||
def has_agent_settings(self) -> bool:
|
||||
return (
|
||||
|
||||
@@ -119,3 +119,9 @@ class AlertTemplateRelationSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = AlertTemplate
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class AlertTemplateAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = AlertTemplate
|
||||
fields = "__all__"
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from alerts.models import Alert
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
|
||||
@app.task
|
||||
def unsnooze_alerts() -> str:
|
||||
from .models import Alert
|
||||
|
||||
Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update(
|
||||
snoozed=False, snooze_until=None
|
||||
@@ -22,3 +21,14 @@ def cache_agents_alert_template():
|
||||
agent.set_alert_template()
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def prune_resolved_alerts(older_than_days: int) -> str:
|
||||
from .models import Alert
|
||||
|
||||
Alert.objects.filter(resolved=True).filter(
|
||||
alert_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
|
||||
).delete()
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
from datetime import datetime, timedelta
|
||||
from unittest.mock import patch
|
||||
|
||||
from core.models import CoreSettings
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker, seq
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from autotasks.models import AutomatedTask
|
||||
from core.models import CoreSettings
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import Alert, AlertTemplate
|
||||
from .serializers import (
|
||||
@@ -330,8 +329,8 @@ class TestAlertsViews(TacticalTestCase):
|
||||
baker.make("clients.Site", alert_template=alert_template, _quantity=3)
|
||||
baker.make("automation.Policy", alert_template=alert_template)
|
||||
core = CoreSettings.objects.first()
|
||||
core.alert_template = alert_template
|
||||
core.save()
|
||||
core.alert_template = alert_template # type: ignore
|
||||
core.save() # type: ignore
|
||||
|
||||
url = f"/alerts/alerttemplates/{alert_template.pk}/related/" # type: ignore
|
||||
|
||||
@@ -403,16 +402,16 @@ class TestAlertTasks(TacticalTestCase):
|
||||
# assign first Alert Template as to a policy and apply it as default
|
||||
policy.alert_template = alert_templates[0] # type: ignore
|
||||
policy.save() # type: ignore
|
||||
core.workstation_policy = policy
|
||||
core.server_policy = policy
|
||||
core.save()
|
||||
core.workstation_policy = policy # type: ignore
|
||||
core.server_policy = policy # type: ignore
|
||||
core.save() # type: ignore
|
||||
|
||||
self.assertEquals(server.set_alert_template().pk, alert_templates[0].pk) # type: ignore
|
||||
self.assertEquals(workstation.set_alert_template().pk, alert_templates[0].pk) # type: ignore
|
||||
|
||||
# assign second Alert Template as the default alert template
|
||||
core.alert_template = alert_templates[1] # type: ignore
|
||||
core.save()
|
||||
core.save() # type: ignore
|
||||
|
||||
self.assertEquals(workstation.set_alert_template().pk, alert_templates[1].pk) # type: ignore
|
||||
self.assertEquals(server.set_alert_template().pk, alert_templates[1].pk) # type: ignore
|
||||
@@ -514,6 +513,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
agent_recovery_email_task,
|
||||
agent_recovery_sms_task,
|
||||
)
|
||||
|
||||
from alerts.models import Alert
|
||||
|
||||
agent_dashboard_alert = baker.make_recipe("agents.overdue_agent")
|
||||
@@ -727,7 +727,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
send_email,
|
||||
sleep,
|
||||
):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from checks.models import Check
|
||||
from checks.tasks import (
|
||||
handle_check_email_alert_task,
|
||||
@@ -736,6 +735,8 @@ class TestAlertTasks(TacticalTestCase):
|
||||
handle_resolved_check_sms_alert_task,
|
||||
)
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
|
||||
# create test data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent_no_settings = baker.make_recipe("agents.agent")
|
||||
@@ -1011,7 +1012,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
send_email,
|
||||
sleep,
|
||||
):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.tasks import (
|
||||
handle_resolved_task_email_alert,
|
||||
@@ -1020,6 +1020,8 @@ class TestAlertTasks(TacticalTestCase):
|
||||
handle_task_sms_alert,
|
||||
)
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
|
||||
# create test data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent_no_settings = baker.make_recipe("agents.agent")
|
||||
@@ -1272,17 +1274,17 @@ class TestAlertTasks(TacticalTestCase):
|
||||
)
|
||||
|
||||
core = CoreSettings.objects.first()
|
||||
core.smtp_host = "test.test.com"
|
||||
core.smtp_port = 587
|
||||
core.smtp_recipients = ["recipient@test.com"]
|
||||
core.twilio_account_sid = "test"
|
||||
core.twilio_auth_token = "1234123412341234"
|
||||
core.sms_alert_recipients = ["+1234567890"]
|
||||
core.smtp_host = "test.test.com" # type: ignore
|
||||
core.smtp_port = 587 # type: ignore
|
||||
core.smtp_recipients = ["recipient@test.com"] # type: ignore
|
||||
core.twilio_account_sid = "test" # type: ignore
|
||||
core.twilio_auth_token = "1234123412341234" # type: ignore
|
||||
core.sms_alert_recipients = ["+1234567890"] # type: ignore
|
||||
|
||||
# test sending email with alert template settings
|
||||
core.send_mail("Test", "Test", alert_template=alert_template)
|
||||
core.send_mail("Test", "Test", alert_template=alert_template) # type: ignore
|
||||
|
||||
core.send_sms("Test", alert_template=alert_template)
|
||||
core.send_sms("Test", alert_template=alert_template) # type: ignore
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
@patch("agents.tasks.agent_outage_sms_task.delay")
|
||||
@@ -1315,6 +1317,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
"alerts.AlertTemplate",
|
||||
is_active=True,
|
||||
agent_always_alert=True,
|
||||
agent_script_actions=False,
|
||||
action=failure_action,
|
||||
action_timeout=30,
|
||||
resolved_action=resolved_action,
|
||||
@@ -1328,6 +1331,14 @@ class TestAlertTasks(TacticalTestCase):
|
||||
|
||||
agent_outages_task()
|
||||
|
||||
# should not have been called since agent_script_actions is set to False
|
||||
nats_cmd.assert_not_called()
|
||||
|
||||
alert_template.agent_script_actions = True # type: ignore
|
||||
alert_template.save() # type: ignore
|
||||
|
||||
agent_outages_task()
|
||||
|
||||
# this is what data should be
|
||||
data = {
|
||||
"func": "runscriptfull",
|
||||
@@ -1340,14 +1351,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
# Setup cmd mock
|
||||
success = {
|
||||
"retcode": 0,
|
||||
"stdout": "success!",
|
||||
"stderr": "",
|
||||
"execution_time": 5.0000,
|
||||
}
|
||||
|
||||
nats_cmd.side_effect = ["pong", success]
|
||||
|
||||
# make sure script run results were stored
|
||||
@@ -1398,3 +1401,36 @@ class TestAlertTasks(TacticalTestCase):
|
||||
["-Parameter", f"-Another '{alert.id}'"], # type: ignore
|
||||
alert.parse_script_args(args=args), # type: ignore
|
||||
)
|
||||
|
||||
def test_prune_resolved_alerts(self):
|
||||
from .tasks import prune_resolved_alerts
|
||||
|
||||
# setup data
|
||||
resolved_alerts = baker.make(
|
||||
"alerts.Alert",
|
||||
resolved=True,
|
||||
_quantity=25,
|
||||
)
|
||||
|
||||
alerts = baker.make(
|
||||
"alerts.Alert",
|
||||
resolved=False,
|
||||
_quantity=25,
|
||||
)
|
||||
|
||||
days = 0
|
||||
for alert in resolved_alerts: # type: ignore
|
||||
alert.alert_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
alert.save()
|
||||
days = days + 5
|
||||
|
||||
days = 0
|
||||
for alert in alerts: # type: ignore
|
||||
alert.alert_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
alert.save()
|
||||
days = days + 5
|
||||
|
||||
# delete resolved Alerts older than 30 days
|
||||
prune_resolved_alerts(30)
|
||||
|
||||
self.assertEqual(Alert.objects.count(), 31)
|
||||
|
||||
@@ -20,4 +20,5 @@ urlpatterns = [
path("superseded/", views.SupersededWinUpdate.as_view()),
path("<int:pk>/chocoresult/", views.ChocoResult.as_view()),
path("<str:agentid>/recovery/", views.AgentRecovery.as_view()),
path("<int:pk>/<str:agentid>/histresult/", views.AgentHistoryResult.as_view()),
]
|
||||
|
||||
@@ -6,7 +6,6 @@ from django.conf import settings
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.authtoken.models import Token
|
||||
@@ -15,20 +14,18 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.models import User
|
||||
from agents.models import Agent, AgentCustomField
|
||||
from agents.serializers import WinAgentSerializer
|
||||
from agents.models import Agent, AgentHistory
|
||||
from agents.serializers import WinAgentSerializer, AgentHistorySerializer
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
|
||||
from checks.models import Check
|
||||
from checks.serializers import CheckRunnerGetSerializer
|
||||
from checks.utils import bytes2human
|
||||
from logs.models import PendingAction
|
||||
from logs.models import PendingAction, DebugLog
|
||||
from software.models import InstalledSoftware
|
||||
from tacticalrmm.utils import SoftwareList, filter_software, notify_error, reload_nats
|
||||
from winupdate.models import WinUpdate, WinUpdatePolicy
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class CheckIn(APIView):
|
||||
|
||||
@@ -36,6 +33,10 @@ class CheckIn(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def patch(self, request):
|
||||
"""
|
||||
!!! DEPRECATED AS OF AGENT 1.6.0 !!!
|
||||
Endpoint will be removed in a future release
|
||||
"""
|
||||
from alerts.models import Alert
|
||||
|
||||
updated = False
|
||||
@@ -182,7 +183,11 @@ class WinUpdates(APIView):
|
||||
|
||||
if reboot:
|
||||
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
|
||||
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="windows_updates",
|
||||
message=f"{agent.hostname} is rebooting after updates were installed.",
|
||||
)
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
@@ -350,13 +355,12 @@ class TaskRunner(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
_ = get_object_or_404(Agent, agent_id=agentid)
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
return Response(TaskGOGetSerializer(task).data)
|
||||
|
||||
def patch(self, request, pk, agentid):
|
||||
from alerts.models import Alert
|
||||
from logs.models import AuditLog
|
||||
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
@@ -371,38 +375,7 @@ class TaskRunner(APIView):
|
||||
if task.custom_field:
|
||||
if not task.stderr:
|
||||
|
||||
if AgentCustomField.objects.filter(
|
||||
field=task.custom_field, agent=task.agent
|
||||
).exists():
|
||||
agent_field = AgentCustomField.objects.get(
|
||||
field=task.custom_field, agent=task.agent
|
||||
)
|
||||
else:
|
||||
agent_field = AgentCustomField.objects.create(
|
||||
field=task.custom_field, agent=task.agent
|
||||
)
|
||||
|
||||
# get last line of stdout
|
||||
value = (
|
||||
new_task.stdout
|
||||
if task.collector_all_output
|
||||
else new_task.stdout.split("\n")[-1].strip()
|
||||
)
|
||||
|
||||
if task.custom_field.type in [
|
||||
"text",
|
||||
"number",
|
||||
"single",
|
||||
"datetime",
|
||||
]:
|
||||
agent_field.string_value = value
|
||||
agent_field.save()
|
||||
elif task.custom_field.type == "multiple":
|
||||
agent_field.multiple_value = value.split(",")
|
||||
agent_field.save()
|
||||
elif task.custom_field.type == "checkbox":
|
||||
agent_field.bool_value = bool(value)
|
||||
agent_field.save()
|
||||
task.save_collector_results()
|
||||
|
||||
status = "passing"
|
||||
else:
|
||||
@@ -419,15 +392,6 @@ class TaskRunner(APIView):
|
||||
else:
|
||||
Alert.handle_alert_failure(new_task)
|
||||
|
||||
AuditLog.objects.create(
|
||||
username=agent.hostname,
|
||||
agent=agent.hostname,
|
||||
object_type="agent",
|
||||
action="task_run",
|
||||
message=f"Scheduled Task {task.name} was run on {agent.hostname}",
|
||||
after_value=AutomatedTask.serialize(new_task),
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -518,6 +482,7 @@ class NewAgent(APIView):
|
||||
action="agent_install",
|
||||
message=f"{request.user} installed new agent {agent.hostname}",
|
||||
after_value=Agent.serialize(agent),
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
return Response(
|
||||
@@ -622,3 +587,16 @@ class AgentRecovery(APIView):
|
||||
reload_nats()
|
||||
|
||||
return Response(ret)
class AgentHistoryResult(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]

def patch(self, request, agentid, pk):
_ = get_object_or_404(Agent, agent_id=agentid)
hist = get_object_or_404(AgentHistory, pk=pk)
s = AgentHistorySerializer(instance=hist, data=request.data, partial=True)
s.is_valid(raise_exception=True)
s.save()
return Response("ok")
@@ -33,7 +33,7 @@ class Policy(BaseAuditModel):
|
||||
|
||||
# get old policy if exists
|
||||
old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
super(Policy, self).save(old_model=old_policy, *args, **kwargs)
|
||||
|
||||
# generate agent checks only if active and enforced were changed
|
||||
if old_policy:
|
||||
@@ -50,7 +50,7 @@ class Policy(BaseAuditModel):
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
agents = list(self.related_agents().only("pk").values_list("pk", flat=True))
|
||||
super(BaseAuditModel, self).delete(*args, **kwargs)
|
||||
super(Policy, self).delete(*args, **kwargs)
|
||||
|
||||
generate_agent_checks_task.delay(agents=agents, create_tasks=True)
|
||||
|
||||
@@ -126,9 +126,9 @@ class Policy(BaseAuditModel):
|
||||
@staticmethod
|
||||
def serialize(policy):
|
||||
# serializes the policy and returns json
|
||||
from .serializers import PolicySerializer
|
||||
from .serializers import PolicyAuditSerializer
|
||||
|
||||
return PolicySerializer(policy).data
|
||||
return PolicyAuditSerializer(policy).data
|
||||
|
||||
@staticmethod
|
||||
def cascade_policy_tasks(agent):
|
||||
|
||||
@@ -89,3 +89,9 @@ class AutoTasksFieldSerializer(ModelSerializer):
|
||||
model = AutomatedTask
|
||||
fields = "__all__"
|
||||
depth = 1
|
||||
|
||||
|
||||
class PolicyAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Policy
|
||||
fields = "__all__"
|
||||
|
||||
@@ -918,11 +918,13 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
@patch("autotasks.models.AutomatedTask.delete_task_on_agent")
|
||||
def test_delete_policy_tasks(self, delete_task_on_agent, create_task):
|
||||
from .tasks import delete_policy_autotasks_task
|
||||
from .tasks import delete_policy_autotasks_task, generate_agent_checks_task
|
||||
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
baker.make_recipe("agents.server_agent", policy=policy)
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
generate_agent_checks_task(agents=[agent.pk], create_tasks=True)
|
||||
|
||||
delete_policy_autotasks_task(task=tasks[0].id) # type: ignore
|
||||
|
||||
@@ -931,11 +933,13 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
@patch("autotasks.models.AutomatedTask.run_win_task")
|
||||
def test_run_policy_task(self, run_win_task, create_task):
|
||||
from .tasks import run_win_policy_autotasks_task
|
||||
from .tasks import run_win_policy_autotasks_task, generate_agent_checks_task
|
||||
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
baker.make_recipe("agents.server_agent", policy=policy)
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
generate_agent_checks_task(agents=[agent.pk], create_tasks=True)
|
||||
|
||||
run_win_policy_autotasks_task(task=tasks[0].id) # type: ignore
|
||||
|
||||
@@ -944,7 +948,10 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
@patch("autotasks.models.AutomatedTask.modify_task_on_agent")
|
||||
def test_update_policy_tasks(self, modify_task_on_agent, create_task):
|
||||
from .tasks import update_policy_autotasks_fields_task
|
||||
from .tasks import (
|
||||
update_policy_autotasks_fields_task,
|
||||
generate_agent_checks_task,
|
||||
)
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
@@ -956,6 +963,8 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
)
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
generate_agent_checks_task(agents=[agent.pk], create_tasks=True)
|
||||
|
||||
tasks[0].enabled = False # type: ignore
|
||||
tasks[0].save() # type: ignore
|
||||
|
||||
@@ -995,6 +1004,8 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
def test_policy_exclusions(self, create_task):
|
||||
from .tasks import generate_agent_checks_task
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
baker.make_recipe("checks.memory_check", policy=policy)
|
||||
@@ -1003,6 +1014,8 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
"agents.agent", policy=policy, monitoring_type="server"
|
||||
)
|
||||
|
||||
generate_agent_checks_task(agents=[agent.pk], create_tasks=True)
|
||||
|
||||
# make sure related agents on policy returns correctly
|
||||
self.assertEqual(policy.related_agents().count(), 1) # type: ignore
|
||||
self.assertEqual(agent.agentchecks.count(), 1) # type: ignore
|
||||
|
||||
@@ -6,19 +6,15 @@ from typing import List
|
||||
|
||||
import pytz
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.db.models.fields import DateTimeField
|
||||
from django.db.utils import DatabaseError
|
||||
from django.utils import timezone as djangotime
|
||||
from logs.models import BaseAuditModel
|
||||
from loguru import logger
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
from packaging import version as pyver
|
||||
from tacticalrmm.utils import bitdays_to_string
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
RUN_TIME_DAY_CHOICES = [
|
||||
(0, "Monday"),
|
||||
(1, "Tuesday"),
|
||||
@@ -195,12 +191,20 @@ class AutomatedTask(BaseAuditModel):
|
||||
@staticmethod
|
||||
def serialize(task):
|
||||
# serializes the task and returns json
|
||||
from .serializers import TaskSerializer
|
||||
from .serializers import TaskAuditSerializer
|
||||
|
||||
return TaskSerializer(task).data
|
||||
return TaskAuditSerializer(task).data
|
||||
|
||||
def create_policy_task(self, agent=None, policy=None, assigned_check=None):
|
||||
|
||||
# added to allow new policy tasks to be assigned to check only when the agent check exists already
|
||||
if (
|
||||
self.assigned_check
|
||||
and agent
|
||||
and agent.agentchecks.filter(parent_check=self.assigned_check.id).exists()
|
||||
):
|
||||
assigned_check = agent.agentchecks.get(parent_check=self.assigned_check.id)
|
||||
|
||||
# if policy is present, then this task is being copied to another policy
|
||||
# if agent is present, then this task is being created on an agent from a policy
|
||||
# exit if neither are set or if both are set
|
||||
@@ -254,7 +258,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
|
||||
elif self.task_type == "runonce":
|
||||
# check if scheduled time is in the past
|
||||
agent_tz = pytz.timezone(agent.timezone)
|
||||
agent_tz = pytz.timezone(agent.timezone) # type: ignore
|
||||
task_time_utc = self.run_time_date.replace(tzinfo=agent_tz).astimezone(
|
||||
pytz.utc
|
||||
)
|
||||
@@ -280,7 +284,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
},
|
||||
}
|
||||
|
||||
if self.run_asap_after_missed and pyver.parse(agent.version) >= pyver.parse(
|
||||
if self.run_asap_after_missed and pyver.parse(agent.version) >= pyver.parse( # type: ignore
|
||||
"1.4.7"
|
||||
):
|
||||
nats_data["schedtaskpayload"]["run_asap_after_missed"] = True
|
||||
@@ -301,19 +305,25 @@ class AutomatedTask(BaseAuditModel):
|
||||
else:
|
||||
return "error"
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5)) # type: ignore
|
||||
|
||||
if r != "ok":
|
||||
self.sync_status = "initial"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in."
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in.", # type: ignore
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.sync_status = "synced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.info(f"{agent.hostname} task {self.name} was successfully created")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} task {self.name} was successfully created", # type: ignore
|
||||
)
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -333,19 +343,25 @@ class AutomatedTask(BaseAuditModel):
|
||||
"enabled": self.enabled,
|
||||
},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5)) # type: ignore
|
||||
|
||||
if r != "ok":
|
||||
self.sync_status = "notsynced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"Unable to modify scheduled task {self.name} on {agent.hostname}. It will try again on next agent checkin"
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to modify scheduled task {self.name} on {agent.hostname}({agent.pk}). It will try again on next agent checkin", # type: ignore
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.sync_status = "synced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.info(f"{agent.hostname} task {self.name} was successfully modified")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} task {self.name} was successfully modified", # type: ignore
|
||||
)
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -362,7 +378,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": self.win_task_name},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10)) # type: ignore
|
||||
|
||||
if r != "ok" and "The system cannot find the file specified" not in r:
|
||||
self.sync_status = "pendingdeletion"
|
||||
@@ -372,13 +388,19 @@ class AutomatedTask(BaseAuditModel):
|
||||
except DatabaseError:
|
||||
pass
|
||||
|
||||
logger.warning(
|
||||
f"{agent.hostname} task {self.name} will be deleted on next checkin"
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} task {self.name} will be deleted on next checkin", # type: ignore
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.delete()
|
||||
logger.info(f"{agent.hostname} task {self.name} was deleted")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname}({agent.pk}) task {self.name} was deleted", # type: ignore
|
||||
)
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -391,9 +413,20 @@ class AutomatedTask(BaseAuditModel):
|
||||
.first()
|
||||
)
|
||||
|
||||
asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False))
|
||||
asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False)) # type: ignore
|
||||
return "ok"
|
||||
|
||||
def save_collector_results(self):
|
||||
|
||||
agent_field = self.custom_field.get_or_create_field_value(self.agent)
|
||||
|
||||
value = (
|
||||
self.stdout.strip()
|
||||
if self.collector_all_output
|
||||
else self.stdout.strip().split("\n")[-1].strip()
|
||||
)
|
||||
agent_field.save_to_field(value)
|
||||
|
||||
def should_create_alert(self, alert_template=None):
|
||||
return (
|
||||
self.dashboard_alert
|
||||
@@ -424,7 +457,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_mail(subject, body, self.agent.alert_template)
|
||||
CORE.send_mail(subject, body, self.agent.alert_template) # type: ignore
|
||||
|
||||
def send_sms(self):
|
||||
from core.models import CoreSettings
|
||||
@@ -441,7 +474,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template)
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template) # type: ignore
|
||||
|
||||
def send_resolved_email(self):
|
||||
from core.models import CoreSettings
|
||||
@@ -453,7 +486,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_mail(subject, body, alert_template=self.agent.alert_template)
|
||||
CORE.send_mail(subject, body, alert_template=self.agent.alert_template) # type: ignore
|
||||
|
||||
def send_resolved_sms(self):
|
||||
from core.models import CoreSettings
|
||||
@@ -464,4 +497,4 @@ class AutomatedTask(BaseAuditModel):
|
||||
subject
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template)
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template) # type: ignore
|
||||
|
||||
@@ -84,3 +84,9 @@ class TaskRunnerPatchSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class TaskAuditSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
fields = "__all__"
|
||||
|
||||
@@ -1,18 +1,16 @@
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
from logging import log
|
||||
import random
|
||||
from time import sleep
|
||||
from typing import Union
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
|
||||
from autotasks.models import AutomatedTask
|
||||
from logs.models import DebugLog
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@app.task
|
||||
def create_win_task_schedule(pk):
|
||||
@@ -53,12 +51,20 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
|
||||
agent = Agent.objects.get(pk=agentpk)
|
||||
|
||||
logger.info(f"Orphaned task cleanup initiated on {agent.hostname}.")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Orphaned task cleanup initiated on {agent.hostname}.",
|
||||
)
|
||||
|
||||
r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10))
|
||||
|
||||
if not isinstance(r, list) and not r: # empty list
|
||||
logger.error(f"Unable to clean up scheduled tasks on {agent.hostname}: {r}")
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to clean up scheduled tasks on {agent.hostname}: {r}",
|
||||
)
|
||||
return "notlist"
|
||||
|
||||
agent_task_names = list(agent.autotasks.values_list("win_task_name", flat=True))
|
||||
@@ -83,13 +89,23 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
}
|
||||
ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
if ret != "ok":
|
||||
logger.error(
|
||||
f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}"
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}",
|
||||
)
|
||||
else:
|
||||
logger.info(f"Removed orphaned task {task} from {agent.hostname}")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Removed orphaned task {task} from {agent.hostname}",
|
||||
)
|
||||
|
||||
logger.info(f"Orphaned task cleanup finished on {agent.hostname}")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Orphaned task cleanup finished on {agent.hostname}",
|
||||
)
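Since these messages now land in the database instead of the loguru log file, recent entries for an agent can be pulled back with an ordinary queryset, e.g.:

# Sketch: the 20 newest agent-issue entries for this agent, newest first.
DebugLog.objects.filter(agent=agent, log_type="agent_issues").order_by("-entry_time")[:20]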
|
||||
|
||||
|
||||
@app.task
|
||||
|
||||
@@ -12,10 +12,6 @@ from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
from django.db import models
|
||||
from logs.models import BaseAuditModel
|
||||
from loguru import logger
|
||||
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
CHECK_TYPE_CHOICES = [
|
||||
("diskspace", "Disk Space Check"),
|
||||
@@ -475,9 +471,9 @@ class Check(BaseAuditModel):
|
||||
@staticmethod
|
||||
def serialize(check):
|
||||
# serializes the check and returns json
|
||||
from .serializers import CheckSerializer
|
||||
from .serializers import CheckAuditSerializer
|
||||
|
||||
return CheckSerializer(check).data
|
||||
return CheckAuditSerializer(check).data
|
||||
|
||||
# for policy diskchecks
|
||||
@staticmethod
|
||||
|
||||
@@ -220,3 +220,9 @@ class CheckHistorySerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = CheckHistory
|
||||
fields = ("x", "y", "results")
|
||||
|
||||
|
||||
class CheckAuditSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Check
|
||||
fields = "__all__"
|
||||
|
||||
@@ -33,13 +33,17 @@ class Client(BaseAuditModel):
|
||||
blank=True,
|
||||
)
|
||||
|
||||
def save(self, *args, **kw):
|
||||
def save(self, *args, **kwargs):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# get old client if exists
|
||||
old_client = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kw)
|
||||
old_client = Client.objects.get(pk=self.pk) if self.pk else None
|
||||
super(Client, self).save(
|
||||
old_model=old_client,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
# check if polcies have changed and initiate task to reapply policies if so
|
||||
if old_client:
|
||||
@@ -50,7 +54,6 @@ class Client(BaseAuditModel):
|
||||
old_client.block_policy_inheritance != self.block_policy_inheritance
|
||||
)
|
||||
):
|
||||
|
||||
generate_agent_checks_task.delay(
|
||||
client=self.pk,
|
||||
create_tasks=True,
|
||||
@@ -120,10 +123,10 @@ class Client(BaseAuditModel):
|
||||
|
||||
@staticmethod
|
||||
def serialize(client):
|
||||
# serializes the client and returns json
|
||||
from .serializers import ClientSerializer
|
||||
from .serializers import ClientAuditSerializer
|
||||
|
||||
return ClientSerializer(client).data
|
||||
# serializes the client and returns json
|
||||
return ClientAuditSerializer(client).data
|
||||
|
||||
|
||||
class Site(BaseAuditModel):
|
||||
@@ -153,13 +156,17 @@ class Site(BaseAuditModel):
|
||||
blank=True,
|
||||
)
|
||||
|
||||
def save(self, *args, **kw):
|
||||
def save(self, *args, **kwargs):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# get old client if exists
|
||||
old_site = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(Site, self).save(*args, **kw)
|
||||
old_site = Site.objects.get(pk=self.pk) if self.pk else None
|
||||
super(Site, self).save(
|
||||
old_model=old_site,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
# check if polcies have changed and initiate task to reapply policies if so
|
||||
if old_site:
|
||||
@@ -168,11 +175,10 @@ class Site(BaseAuditModel):
|
||||
or (old_site.workstation_policy != self.workstation_policy)
|
||||
or (old_site.block_policy_inheritance != self.block_policy_inheritance)
|
||||
):
|
||||
|
||||
generate_agent_checks_task.delay(site=self.pk, create_tasks=True)
|
||||
|
||||
if old_site.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
if old_site.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
|
||||
class Meta:
|
||||
ordering = ("name",)
|
||||
@@ -233,10 +239,10 @@ class Site(BaseAuditModel):
|
||||
|
||||
@staticmethod
|
||||
def serialize(site):
|
||||
# serializes the site and returns json
|
||||
from .serializers import SiteSerializer
|
||||
from .serializers import SiteAuditSerializer
|
||||
|
||||
return SiteSerializer(site).data
|
||||
# serializes the site and returns json
|
||||
return SiteAuditSerializer(site).data
|
||||
|
||||
|
||||
MON_TYPE_CHOICES = [
|
||||
@@ -308,6 +314,22 @@ class ClientCustomField(models.Model):
|
||||
else:
|
||||
return self.string_value
|
||||
|
||||
    def save_to_field(self, value):
        if self.field.type in [
            "text",
            "number",
            "single",
            "datetime",
        ]:
            self.string_value = value
            self.save()
        # compare the field's type, not the builtin `type`, so multiple/checkbox
        # values are actually persisted
        elif self.field.type == "multiple":
            self.multiple_value = value.split(",")
            self.save()
        elif self.field.type == "checkbox":
            self.bool_value = bool(value)
            self.save()
|
||||
|
||||
|
||||
class SiteCustomField(models.Model):
|
||||
site = models.ForeignKey(
|
||||
@@ -342,3 +364,19 @@ class SiteCustomField(models.Model):
|
||||
return self.bool_value
|
||||
else:
|
||||
return self.string_value
|
||||
|
||||
    def save_to_field(self, value):
        if self.field.type in [
            "text",
            "number",
            "single",
            "datetime",
        ]:
            self.string_value = value
            self.save()
        # compare the field's type, not the builtin `type`, so multiple/checkbox
        # values are actually persisted
        elif self.field.type == "multiple":
            self.multiple_value = value.split(",")
            self.save()
        elif self.field.type == "checkbox":
            self.bool_value = bool(value)
            self.save()
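A quick sketch of how save_to_field maps an incoming value onto the typed columns (the field names and values here are invented):

cf = CustomField.objects.create(model="site", name="owners", type="multiple")
scf = cf.get_or_create_field_value(site)       # returns a SiteCustomField
scf.save_to_field("alice,bob")                 # stored as multiple_value == ["alice", "bob"]

flag = CustomField.objects.create(model="site", name="managed", type="checkbox")
flag.get_or_create_field_value(site).save_to_field(True)   # stored as bool_value == True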
|
||||
|
||||
@@ -1,4 +1,10 @@
|
||||
from rest_framework.serializers import ModelSerializer, ReadOnlyField, ValidationError
|
||||
from django.db.models.base import Model
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
ReadOnlyField,
|
||||
Serializer,
|
||||
ValidationError,
|
||||
)
|
||||
|
||||
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
|
||||
|
||||
@@ -134,3 +140,15 @@ class DeploymentSerializer(ModelSerializer):
|
||||
"install_flags",
|
||||
"created",
|
||||
]
|
||||
|
||||
|
||||
class SiteAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Site
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class ClientAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Client
|
||||
fields = "__all__"
|
||||
|
||||
@@ -3,10 +3,8 @@ import re
|
||||
import uuid
|
||||
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
@@ -26,8 +24,6 @@ from .serializers import (
|
||||
SiteSerializer,
|
||||
)
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class GetAddClients(APIView):
|
||||
permission_classes = [IsAuthenticated, ManageClientsPerms]
|
||||
|
||||
@@ -2,6 +2,7 @@ from django.core.management.base import BaseCommand
|
||||
|
||||
from logs.models import PendingAction
|
||||
from scripts.models import Script
|
||||
from accounts.models import User
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
@@ -13,3 +14,9 @@ class Command(BaseCommand):
|
||||
|
||||
# load community scripts into the db
|
||||
Script.load_community_scripts()
|
||||
|
||||
# make sure installer user is set to block_dashboard_logins
|
||||
if User.objects.filter(is_installer_user=True).exists():
|
||||
for user in User.objects.filter(is_installer_user=True):
|
||||
user.block_dashboard_login = True
|
||||
user.save()
|
||||
|
||||
23 api/tacticalrmm/core/migrations/0024_auto_20210707_1828.py Normal file
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-07 18:28
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0023_coresettings_clear_faults_days'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='agent_history_prune_days',
|
||||
field=models.PositiveIntegerField(default=30),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='resolved_alerts_prune_days',
|
||||
field=models.PositiveIntegerField(default=0),
|
||||
),
|
||||
]
|
||||
28 api/tacticalrmm/core/migrations/0025_auto_20210707_1835.py Normal file
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-07 18:35
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0024_auto_20210707_1828'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='agent_debug_level',
|
||||
field=models.CharField(choices=[('info', 'Info'), ('warning', 'Warning'), ('error', 'Error'), ('critical', 'Critical')], default='info', max_length=20),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='debug_log_prune_days',
|
||||
field=models.PositiveIntegerField(default=30),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='coresettings',
|
||||
name='agent_history_prune_days',
|
||||
field=models.PositiveIntegerField(default=60),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 17:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0025_auto_20210707_1835'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='coresettings',
|
||||
name='audit_log_prune_days',
|
||||
field=models.PositiveIntegerField(default=0),
|
||||
),
|
||||
]
|
||||
73 api/tacticalrmm/core/migrations/0027_auto_20210905_1606.py Normal file
@@ -0,0 +1,73 @@
|
||||
# Generated by Django 3.2.6 on 2021-09-05 16:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0026_coresettings_audit_log_prune_days'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='customfield',
|
||||
name='created_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='customfield',
|
||||
name='created_time',
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='customfield',
|
||||
name='modified_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='customfield',
|
||||
name='modified_time',
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='globalkvstore',
|
||||
name='created_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='globalkvstore',
|
||||
name='created_time',
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='globalkvstore',
|
||||
name='modified_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='globalkvstore',
|
||||
name='modified_time',
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='urlaction',
|
||||
name='created_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='urlaction',
|
||||
name='created_time',
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='urlaction',
|
||||
name='modified_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='urlaction',
|
||||
name='modified_time',
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -1,17 +1,15 @@
|
||||
import smtplib
|
||||
from email.message import EmailMessage
|
||||
from django.db.models.enums import Choices
|
||||
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import models
|
||||
from loguru import logger
|
||||
from twilio.rest import Client as TwClient
|
||||
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from logs.models import BaseAuditModel, DebugLog, LOG_LEVEL_CHOICES
|
||||
|
||||
TZ_CHOICES = [(_, _) for _ in pytz.all_timezones]
|
||||
|
||||
@@ -51,6 +49,13 @@ class CoreSettings(BaseAuditModel):
|
||||
)
|
||||
# removes check history older than days
|
||||
check_history_prune_days = models.PositiveIntegerField(default=30)
|
||||
resolved_alerts_prune_days = models.PositiveIntegerField(default=0)
|
||||
agent_history_prune_days = models.PositiveIntegerField(default=60)
|
||||
debug_log_prune_days = models.PositiveIntegerField(default=30)
|
||||
audit_log_prune_days = models.PositiveIntegerField(default=0)
|
||||
agent_debug_level = models.CharField(
|
||||
max_length=20, choices=LOG_LEVEL_CHOICES, default="info"
|
||||
)
|
||||
clear_faults_days = models.IntegerField(default=0)
|
||||
mesh_token = models.CharField(max_length=255, null=True, blank=True, default="")
|
||||
mesh_username = models.CharField(max_length=255, null=True, blank=True, default="")
|
||||
@@ -184,14 +189,14 @@ class CoreSettings(BaseAuditModel):
|
||||
server.quit()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Sending email failed with error: {e}")
|
||||
DebugLog.error(message=f"Sending email failed with error: {e}")
|
||||
if test:
|
||||
return str(e)
|
||||
else:
|
||||
return True
|
||||
|
||||
def send_sms(self, body, alert_template=None):
|
||||
if not alert_template and not self.sms_is_configured:
|
||||
if not alert_template or not self.sms_is_configured:
|
||||
return
|
||||
|
||||
# override email recipients if alert_template is passed and is set
|
||||
@@ -205,7 +210,7 @@ class CoreSettings(BaseAuditModel):
|
||||
try:
|
||||
tw_client.messages.create(body=body, to=num, from_=self.twilio_number)
|
||||
except Exception as e:
|
||||
logger.error(f"SMS failed to send: {e}")
|
||||
DebugLog.error(message=f"SMS failed to send: {e}")
|
||||
|
||||
@staticmethod
|
||||
def serialize(core):
|
||||
@@ -227,7 +232,7 @@ FIELD_TYPE_CHOICES = (
|
||||
MODEL_CHOICES = (("client", "Client"), ("site", "Site"), ("agent", "Agent"))
|
||||
|
||||
|
||||
class CustomField(models.Model):
|
||||
class CustomField(BaseAuditModel):
|
||||
|
||||
order = models.PositiveIntegerField(default=0)
|
||||
model = models.CharField(max_length=25, choices=MODEL_CHOICES)
|
||||
@@ -256,6 +261,12 @@ class CustomField(models.Model):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@staticmethod
|
||||
def serialize(field):
|
||||
from .serializers import CustomFieldSerializer
|
||||
|
||||
return CustomFieldSerializer(field).data
|
||||
|
||||
@property
|
||||
def default_value(self):
|
||||
if self.type == "multiple":
|
||||
@@ -265,6 +276,26 @@ class CustomField(models.Model):
|
||||
else:
|
||||
return self.default_value_string
|
||||
|
||||
    def get_or_create_field_value(self, instance):
        from agents.models import Agent, AgentCustomField
        from clients.models import Client, ClientCustomField, Site, SiteCustomField

        if isinstance(instance, Agent):
            if AgentCustomField.objects.filter(field=self, agent=instance).exists():
                return AgentCustomField.objects.get(field=self, agent=instance)
            else:
                return AgentCustomField.objects.create(field=self, agent=instance)
        elif isinstance(instance, Client):
            if ClientCustomField.objects.filter(field=self, client=instance).exists():
                return ClientCustomField.objects.get(field=self, client=instance)
            else:
                return ClientCustomField.objects.create(field=self, client=instance)
        elif isinstance(instance, Site):
            if SiteCustomField.objects.filter(field=self, site=instance).exists():
                return SiteCustomField.objects.get(field=self, site=instance)
            else:
                return SiteCustomField.objects.create(field=self, site=instance)
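The same lookup could be written with Django's get_or_create; shown only as a sketch of the equivalent behaviour, not what the diff does:

    # Sketch only -- the same method expressed with Django's get_or_create:
    def get_or_create_field_value(self, instance):
        from agents.models import Agent, AgentCustomField

        if isinstance(instance, Agent):
            obj, _created = AgentCustomField.objects.get_or_create(field=self, agent=instance)
            return obj
        # ... the Client and Site branches follow the same pattern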
|
||||
|
||||
|
||||
class CodeSignToken(models.Model):
|
||||
token = models.CharField(max_length=255, null=True, blank=True)
|
||||
@@ -279,19 +310,34 @@ class CodeSignToken(models.Model):
|
||||
return "Code signing token"
|
||||
|
||||
|
||||
class GlobalKVStore(models.Model):
|
||||
class GlobalKVStore(BaseAuditModel):
|
||||
name = models.CharField(max_length=25)
|
||||
value = models.TextField()
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@staticmethod
|
||||
def serialize(store):
|
||||
from .serializers import KeyStoreSerializer
|
||||
|
||||
class URLAction(models.Model):
|
||||
return KeyStoreSerializer(store).data
|
||||
|
||||
|
||||
class URLAction(BaseAuditModel):
|
||||
name = models.CharField(max_length=25)
|
||||
desc = models.CharField(max_length=100, null=True, blank=True)
|
||||
pattern = models.TextField()
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@staticmethod
|
||||
def serialize(action):
|
||||
from .serializers import URLActionSerializer
|
||||
|
||||
return URLActionSerializer(action).data
|
||||
|
||||
|
||||
RUN_ON_CHOICES = (
|
||||
("client", "Client"),
|
||||
|
||||
@@ -1,17 +1,15 @@
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
from checks.tasks import prune_check_history
|
||||
from agents.tasks import clear_faults_task
|
||||
from agents.tasks import clear_faults_task, prune_agent_history
|
||||
from alerts.tasks import prune_resolved_alerts
|
||||
from core.models import CoreSettings
|
||||
from logs.tasks import prune_debug_log, prune_audit_log
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@app.task
|
||||
def core_maintenance_tasks():
|
||||
@@ -32,18 +30,37 @@ def core_maintenance_tasks():
|
||||
core = CoreSettings.objects.first()
|
||||
|
||||
# remove old CheckHistory data
|
||||
if core.check_history_prune_days > 0:
|
||||
prune_check_history.delay(core.check_history_prune_days)
|
||||
if core.check_history_prune_days > 0: # type: ignore
|
||||
prune_check_history.delay(core.check_history_prune_days) # type: ignore
|
||||
|
||||
# remove old resolved alerts
|
||||
if core.resolved_alerts_prune_days > 0: # type: ignore
|
||||
prune_resolved_alerts.delay(core.resolved_alerts_prune_days) # type: ignore
|
||||
|
||||
# remove old agent history
|
||||
if core.agent_history_prune_days > 0: # type: ignore
|
||||
prune_agent_history.delay(core.agent_history_prune_days) # type: ignore
|
||||
|
||||
# remove old debug logs
|
||||
if core.debug_log_prune_days > 0: # type: ignore
|
||||
prune_debug_log.delay(core.debug_log_prune_days) # type: ignore
|
||||
|
||||
# remove old audit logs
|
||||
if core.audit_log_prune_days > 0: # type: ignore
|
||||
prune_audit_log.delay(core.audit_log_prune_days) # type: ignore
|
||||
|
||||
# clear faults
|
||||
if core.clear_faults_days > 0:
|
||||
clear_faults_task.delay(core.clear_faults_days)
|
||||
if core.clear_faults_days > 0: # type: ignore
|
||||
clear_faults_task.delay(core.clear_faults_days) # type: ignore
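The # type: ignore markers come from CoreSettings.objects.first() being Optional to the type checker; a sketch of one way to avoid them (not what this change does):

core = CoreSettings.objects.first()
if core is None:  # single-row settings table; should not happen in practice
    return "missing coresettings"

if core.debug_log_prune_days > 0:
    prune_debug_log.delay(core.debug_log_prune_days)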
|
||||
|
||||
|
||||
@app.task
|
||||
def cache_db_fields_task():
|
||||
from agents.models import Agent
|
||||
|
||||
for agent in Agent.objects.all():
|
||||
for agent in Agent.objects.prefetch_related("winupdates", "pendingactions").only(
|
||||
"pending_actions_count", "has_patches_pending", "pk"
|
||||
):
|
||||
agent.pending_actions_count = agent.pendingactions.filter(
|
||||
status="pending"
|
||||
).count()
|
||||
|
||||
@@ -3,7 +3,9 @@ import pprint
|
||||
import re
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.models.fields import IPAddressField
|
||||
from django.shortcuts import get_object_or_404
|
||||
from logs.models import AuditLog
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.exceptions import ParseError
|
||||
@@ -346,9 +348,18 @@ class RunURLAction(APIView):
|
||||
from requests.utils import requote_uri
|
||||
|
||||
from agents.models import Agent
|
||||
from clients.models import Client, Site
|
||||
from tacticalrmm.utils import replace_db_values
|
||||
|
||||
agent = get_object_or_404(Agent, pk=request.data["agent"])
|
||||
if "agent" in request.data.keys():
|
||||
instance = get_object_or_404(Agent, pk=request.data["agent"])
|
||||
elif "site" in request.data.keys():
|
||||
instance = get_object_or_404(Site, pk=request.data["site"])
|
||||
elif "client" in request.data.keys():
|
||||
instance = get_object_or_404(Client, pk=request.data["client"])
|
||||
else:
|
||||
return notify_error("received an incorrect request")
|
||||
|
||||
action = get_object_or_404(URLAction, pk=request.data["action"])
|
||||
|
||||
pattern = re.compile("\\{\\{([\\w\\s]+\\.[\\w\\s]+)\\}\\}")
|
||||
@@ -356,10 +367,17 @@ class RunURLAction(APIView):
|
||||
url_pattern = action.pattern
|
||||
|
||||
for string in re.findall(pattern, action.pattern):
|
||||
value = replace_db_values(string=string, agent=agent, quotes=False)
|
||||
value = replace_db_values(string=string, instance=instance, quotes=False)
|
||||
|
||||
url_pattern = re.sub("\\{\\{" + string + "\\}\\}", str(value), url_pattern)
|
||||
|
||||
AuditLog.audit_url_action(
|
||||
username=request.user.username,
|
||||
urlaction=action,
|
||||
instance=instance,
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
return Response(requote_uri(url_pattern))
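For context, a hypothetical URL action and what the substitution loop produces (all values invented):

# action.pattern:
#   "https://support.example.com/search?q={{client.name}}&host={{agent.hostname}}"
# For every "{{ ... }}" match, replace_db_values() resolves the value against the
# agent/site/client instance selected above, and re.sub swaps it into url_pattern:
#   "https://support.example.com/search?q=Acme Corp&host=WS01"
# requote_uri() then percent-encodes the spaces before the URL is returned.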
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import AuditLog, PendingAction
|
||||
from .models import AuditLog, PendingAction, DebugLog
|
||||
|
||||
admin.site.register(PendingAction)
|
||||
admin.site.register(AuditLog)
|
||||
admin.site.register(DebugLog)
|
||||
|
||||
68 api/tacticalrmm/logs/migrations/0013_auto_20210614_1835.py Normal file
@@ -0,0 +1,68 @@
|
||||
# Generated by Django 3.2.1 on 2021-06-14 18:35
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("logs", "0012_auto_20210228_0943"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="agent",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="debuglogs",
|
||||
to="agents.agent",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="entry_time",
|
||||
field=models.DateTimeField(
|
||||
auto_now_add=True, default=django.utils.timezone.now
|
||||
),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="log_level",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("info", "Info"),
|
||||
("warning", "Warning"),
|
||||
("error", "Error"),
|
||||
("critical", "Critical"),
|
||||
],
|
||||
default="info",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="log_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("agent_update", "Agent Update"),
|
||||
("agent_issues", "Agent Issues"),
|
||||
("win_updates", "Windows Updates"),
|
||||
("system_issues", "System Issues"),
|
||||
("scripting", "Scripting"),
|
||||
],
|
||||
default="system_issues",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="debuglog",
|
||||
name="message",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
18 api/tacticalrmm/logs/migrations/0014_auditlog_agent_id.py Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-06-28 02:37
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0013_auto_20210614_1835'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='auditlog',
|
||||
name='agent_id',
|
||||
field=models.PositiveIntegerField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 04:26
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0014_auditlog_agent_id'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='auditlog',
|
||||
name='object_type',
|
||||
field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk'), ('alert_template', 'Alert Template'), ('role', 'Role')], max_length=100),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 17:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0015_alter_auditlog_object_type'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='auditlog',
|
||||
name='object_type',
|
||||
field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk'), ('alerttemplate', 'Alert Template'), ('role', 'Role')], max_length=100),
|
||||
),
|
||||
]
|
||||
23 api/tacticalrmm/logs/migrations/0017_auto_20210731_1707.py Normal file
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-31 17:07
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0016_alter_auditlog_object_type'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='pendingaction',
|
||||
name='cancelable',
|
||||
field=models.BooleanField(blank=True, default=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='pendingaction',
|
||||
name='action_type',
|
||||
field=models.CharField(blank=True, choices=[('schedreboot', 'Scheduled Reboot'), ('taskaction', 'Scheduled Task Action'), ('agentupdate', 'Agent Update'), ('chocoinstall', 'Chocolatey Software Install'), ('runcmd', 'Run Command'), ('runscript', 'Run Script'), ('runpatchscan', 'Run Patch Scan'), ('runpatchinstall', 'Run Patch Install')], max_length=255, null=True),
|
||||
),
|
||||
]
|
||||
23 api/tacticalrmm/logs/migrations/0018_auto_20210905_1606.py Normal file
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.2.6 on 2021-09-05 16:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0017_auto_20210731_1707'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='auditlog',
|
||||
name='action',
|
||||
field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('agent_install', 'Agent Install'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command'), ('bulk_action', 'Bulk Action'), ('url_action', 'URL Action')], max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='auditlog',
|
||||
name='object_type',
|
||||
field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk'), ('alerttemplate', 'Alert Template'), ('role', 'Role'), ('urlaction', 'URL Action'), ('keystore', 'Global Key Store'), ('customfield', 'Custom Field')], max_length=100),
|
||||
),
|
||||
]
|
||||
@@ -2,14 +2,24 @@ import datetime as dt
|
||||
from abc import abstractmethod
|
||||
|
||||
from django.db import models
|
||||
|
||||
from tacticalrmm.middleware import get_debug_info, get_username
|
||||
|
||||
|
||||
def get_debug_level():
|
||||
from core.models import CoreSettings
|
||||
|
||||
return CoreSettings.objects.first().agent_debug_level # type: ignore
|
||||
|
||||
|
||||
ACTION_TYPE_CHOICES = [
|
||||
("schedreboot", "Scheduled Reboot"),
|
||||
("taskaction", "Scheduled Task Action"), # deprecated
|
||||
("agentupdate", "Agent Update"),
|
||||
("chocoinstall", "Chocolatey Software Install"),
|
||||
("runcmd", "Run Command"),
|
||||
("runscript", "Run Script"),
|
||||
("runpatchscan", "Run Patch Scan"),
|
||||
("runpatchinstall", "Run Patch Install"),
|
||||
]
|
||||
|
||||
AUDIT_ACTION_TYPE_CHOICES = [
|
||||
@@ -26,6 +36,7 @@ AUDIT_ACTION_TYPE_CHOICES = [
|
||||
("execute_script", "Execute Script"),
|
||||
("execute_command", "Execute Command"),
|
||||
("bulk_action", "Bulk Action"),
|
||||
("url_action", "URL Action"),
|
||||
]
|
||||
|
||||
AUDIT_OBJECT_TYPE_CHOICES = [
|
||||
@@ -40,6 +51,11 @@ AUDIT_OBJECT_TYPE_CHOICES = [
|
||||
("automatedtask", "Automated Task"),
|
||||
("coresettings", "Core Settings"),
|
||||
("bulk", "Bulk"),
|
||||
("alerttemplate", "Alert Template"),
|
||||
("role", "Role"),
|
||||
("urlaction", "URL Action"),
|
||||
("keystore", "Global Key Store"),
|
||||
("customfield", "Custom Field"),
|
||||
]
|
||||
|
||||
STATUS_CHOICES = [
|
||||
@@ -51,6 +67,7 @@ STATUS_CHOICES = [
|
||||
class AuditLog(models.Model):
|
||||
username = models.CharField(max_length=100)
|
||||
agent = models.CharField(max_length=255, null=True, blank=True)
|
||||
agent_id = models.PositiveIntegerField(blank=True, null=True)
|
||||
entry_time = models.DateTimeField(auto_now_add=True)
|
||||
action = models.CharField(max_length=100, choices=AUDIT_ACTION_TYPE_CHOICES)
|
||||
object_type = models.CharField(max_length=100, choices=AUDIT_OBJECT_TYPE_CHOICES)
|
||||
@@ -73,24 +90,25 @@ class AuditLog(models.Model):
|
||||
return super(AuditLog, self).save(*args, **kwargs)
|
||||
|
||||
@staticmethod
|
||||
def audit_mesh_session(username, hostname, debug_info={}):
|
||||
def audit_mesh_session(username, agent, debug_info={}):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
agent=hostname,
|
||||
agent=agent.hostname,
|
||||
agent_id=agent.id,
|
||||
object_type="agent",
|
||||
action="remote_session",
|
||||
message=f"{username} used Mesh Central to initiate a remote session to {hostname}.",
|
||||
message=f"{username} used Mesh Central to initiate a remote session to {agent.hostname}.",
|
||||
debug_info=debug_info,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def audit_raw_command(username, hostname, cmd, shell, debug_info={}):
|
||||
def audit_raw_command(username, agent, cmd, shell, debug_info={}):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
agent=hostname,
|
||||
agent=agent.hostname,
|
||||
object_type="agent",
|
||||
action="execute_command",
|
||||
message=f"{username} issued {shell} command on {hostname}.",
|
||||
message=f"{username} issued {shell} command on {agent.hostname}.",
|
||||
after_value=cmd,
|
||||
debug_info=debug_info,
|
||||
)
|
||||
@@ -102,6 +120,7 @@ class AuditLog(models.Model):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
object_type=object_type,
|
||||
agent_id=before["id"] if object_type == "agent" else None,
|
||||
action="modify",
|
||||
message=f"{username} modified {object_type} {name}",
|
||||
before_value=before,
|
||||
@@ -114,6 +133,7 @@ class AuditLog(models.Model):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
object_type=object_type,
|
||||
agent=after["id"] if object_type == "agent" else None,
|
||||
action="add",
|
||||
message=f"{username} added {object_type} {name}",
|
||||
after_value=after,
|
||||
@@ -125,6 +145,7 @@ class AuditLog(models.Model):
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
object_type=object_type,
|
||||
agent=before["id"] if object_type == "agent" else None,
|
||||
action="delete",
|
||||
message=f"{username} deleted {object_type} {name}",
|
||||
before_value=before,
|
||||
@@ -132,13 +153,14 @@ class AuditLog(models.Model):
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def audit_script_run(username, hostname, script, debug_info={}):
|
||||
def audit_script_run(username, agent, script, debug_info={}):
|
||||
AuditLog.objects.create(
|
||||
agent=hostname,
|
||||
agent=agent.hostname,
|
||||
agent_id=agent.id,
|
||||
username=username,
|
||||
object_type="agent",
|
||||
action="execute_script",
|
||||
message=f'{username} ran script: "{script}" on {hostname}',
|
||||
message=f'{username} ran script: "{script}" on {agent.hostname}',
|
||||
debug_info=debug_info,
|
||||
)
|
||||
|
||||
@@ -172,6 +194,21 @@ class AuditLog(models.Model):
|
||||
debug_info=debug_info,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def audit_url_action(username, urlaction, instance, debug_info={}):
|
||||
|
||||
name = instance.hostname if hasattr(instance, "hostname") else instance.name
|
||||
classname = type(instance).__name__
|
||||
AuditLog.objects.create(
|
||||
username=username,
|
||||
agent=instance.hostname if classname == "Agent" else None,
|
||||
agent_id=instance.id if classname == "Agent" else None,
|
||||
object_type=classname.lower(),
|
||||
action="url_action",
|
||||
message=f"{username} ran url action: {urlaction.pattern} on {classname}: {name}",
|
||||
debug_info=debug_info,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def audit_bulk_action(username, action, affected, debug_info={}):
|
||||
from agents.models import Agent
|
||||
@@ -190,13 +227,13 @@ class AuditLog(models.Model):
|
||||
site = Site.objects.get(pk=affected["site"])
|
||||
target = f"on all agents within site: {site.client.name}\\{site.name}"
|
||||
elif affected["target"] == "agents":
|
||||
agents = Agent.objects.filter(pk__in=affected["agentPKs"]).values_list(
|
||||
agents = Agent.objects.filter(pk__in=affected["agents"]).values_list(
|
||||
"hostname", flat=True
|
||||
)
|
||||
target = "on multiple agents"
|
||||
|
||||
if action == "script":
|
||||
script = Script.objects.get(pk=affected["scriptPK"])
|
||||
script = Script.objects.get(pk=affected["script"])
|
||||
action = f"script: {script.name}"
|
||||
|
||||
if agents:
|
||||
@@ -212,8 +249,71 @@ class AuditLog(models.Model):
|
||||
)
|
||||
|
||||
|
||||
LOG_LEVEL_CHOICES = [
|
||||
("info", "Info"),
|
||||
("warning", "Warning"),
|
||||
("error", "Error"),
|
||||
("critical", "Critical"),
|
||||
]
|
||||
|
||||
LOG_TYPE_CHOICES = [
|
||||
("agent_update", "Agent Update"),
|
||||
("agent_issues", "Agent Issues"),
|
||||
("win_updates", "Windows Updates"),
|
||||
("system_issues", "System Issues"),
|
||||
("scripting", "Scripting"),
|
||||
]
|
||||
|
||||
|
||||
class DebugLog(models.Model):
|
||||
pass
|
||||
entry_time = models.DateTimeField(auto_now_add=True)
|
||||
agent = models.ForeignKey(
|
||||
"agents.Agent",
|
||||
related_name="debuglogs",
|
||||
on_delete=models.CASCADE,
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
log_level = models.CharField(
|
||||
max_length=50, choices=LOG_LEVEL_CHOICES, default="info"
|
||||
)
|
||||
log_type = models.CharField(
|
||||
max_length=50, choices=LOG_TYPE_CHOICES, default="system_issues"
|
||||
)
|
||||
message = models.TextField(null=True, blank=True)
|
||||
|
||||
    @classmethod
    def info(
        cls,
        message,
        agent=None,
        log_type="system_issues",
    ):
        if get_debug_level() in ["info"]:
            cls.objects.create(
                log_level="info", agent=agent, log_type=log_type, message=message
            )

    @classmethod
    def warning(cls, message, agent=None, log_type="system_issues"):
        if get_debug_level() in ["info", "warning"]:
            cls.objects.create(
                log_level="warning", agent=agent, log_type=log_type, message=message
            )

    @classmethod
    def error(cls, message, agent=None, log_type="system_issues"):
        if get_debug_level() in ["info", "warning", "error"]:
            cls.objects.create(
                log_level="error", agent=agent, log_type=log_type, message=message
            )

    @classmethod
    def critical(cls, message, agent=None, log_type="system_issues"):
        if get_debug_level() in ["info", "warning", "error", "critical"]:
            cls.objects.create(
                log_level="critical", agent=agent, log_type=log_type, message=message
            )
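A short usage sketch of the new helpers (the agent and message are made up):

# Writes a row only if CoreSettings.agent_debug_level is "info" or "warning".
DebugLog.warning(
    agent=agent,
    log_type="win_updates",
    message=f"Patch scan timed out on {agent.hostname}",
)
# DebugLog.critical(...) always records, since every level includes "critical".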
|
||||
|
||||
|
||||
class PendingAction(models.Model):
|
||||
@@ -232,6 +332,7 @@ class PendingAction(models.Model):
|
||||
choices=STATUS_CHOICES,
|
||||
default="pending",
|
||||
)
|
||||
cancelable = models.BooleanField(blank=True, default=False)
|
||||
celery_id = models.CharField(null=True, blank=True, max_length=255)
|
||||
details = models.JSONField(null=True, blank=True)
|
||||
|
||||
@@ -247,6 +348,8 @@ class PendingAction(models.Model):
|
||||
return "Next update cycle"
|
||||
elif self.action_type == "chocoinstall":
|
||||
return "ASAP"
|
||||
else:
|
||||
return "On next checkin"
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
@@ -259,6 +362,14 @@ class PendingAction(models.Model):
|
||||
elif self.action_type == "chocoinstall":
|
||||
return f"{self.details['name']} software install"
|
||||
|
||||
elif self.action_type in [
|
||||
"runcmd",
|
||||
"runscript",
|
||||
"runpatchscan",
|
||||
"runpatchinstall",
|
||||
]:
|
||||
return f"{self.action_type}"
|
||||
|
||||
|
||||
class BaseAuditModel(models.Model):
|
||||
# abstract base class for auditing models
|
||||
@@ -275,13 +386,14 @@ class BaseAuditModel(models.Model):
|
||||
def serialize():
|
||||
pass
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
def save(self, old_model=None, *args, **kwargs):
|
||||
|
||||
if get_username():
|
||||
|
||||
before_value = {}
|
||||
object_class = type(self)
|
||||
object_name = object_class.__name__.lower()
|
||||
username = get_username()
|
||||
after_value = object_class.serialize(self) # type: ignore
|
||||
|
||||
# populate created_by and modified_by fields on instance
|
||||
if not getattr(self, "created_by", None):
|
||||
@@ -289,32 +401,37 @@ class BaseAuditModel(models.Model):
|
||||
if hasattr(self, "modified_by"):
|
||||
self.modified_by = username
|
||||
|
||||
# capture object properties before edit
|
||||
if self.pk:
|
||||
before_value = object_class.objects.get(pk=self.id)
|
||||
|
||||
# dont create entry for agent add since that is done in view
|
||||
if not self.pk:
|
||||
AuditLog.audit_object_add(
|
||||
username,
|
||||
object_name,
|
||||
object_class.serialize(self),
|
||||
after_value, # type: ignore
|
||||
self.__str__(),
|
||||
debug_info=get_debug_info(),
|
||||
)
|
||||
else:
|
||||
AuditLog.audit_object_changed(
|
||||
username,
|
||||
object_class.__name__.lower(),
|
||||
object_class.serialize(before_value),
|
||||
object_class.serialize(self),
|
||||
self.__str__(),
|
||||
debug_info=get_debug_info(),
|
||||
)
|
||||
|
||||
return super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
if old_model:
|
||||
before_value = object_class.serialize(old_model) # type: ignore
|
||||
else:
|
||||
before_value = object_class.serialize(object_class.objects.get(pk=self.pk)) # type: ignore
|
||||
# only create an audit entry if the values have changed
|
||||
if before_value != after_value: # type: ignore
|
||||
|
||||
AuditLog.audit_object_changed(
|
||||
username,
|
||||
object_class.__name__.lower(),
|
||||
before_value,
|
||||
after_value, # type: ignore
|
||||
self.__str__(),
|
||||
debug_info=get_debug_info(),
|
||||
)
|
||||
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
super(BaseAuditModel, self).delete(*args, **kwargs)
|
||||
|
||||
if get_username():
|
||||
|
||||
@@ -322,9 +439,7 @@ class BaseAuditModel(models.Model):
|
||||
AuditLog.audit_object_delete(
|
||||
get_username(),
|
||||
object_class.__name__.lower(),
|
||||
object_class.serialize(self),
|
||||
object_class.serialize(self), # type: ignore
|
||||
self.__str__(),
|
||||
debug_info=get_debug_info(),
|
||||
)
|
||||
|
||||
return super(BaseAuditModel, self).delete(*args, **kwargs)
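A condensed sketch of the calling pattern that the Client and Site overrides earlier in this diff now follow (Workstation is a hypothetical subclass, fields omitted):

class Workstation(BaseAuditModel):     # hypothetical auditable model
    def save(self, *args, **kwargs):
        old = Workstation.objects.get(pk=self.pk) if self.pk else None
        # BaseAuditModel.save() serializes old_model as the "before" value and
        # only writes a "modify" AuditLog entry when it differs from the new state.
        super().save(old_model=old, *args, **kwargs)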
|
||||
|
||||
@@ -1,25 +1,23 @@
|
||||
from rest_framework import serializers
|
||||
|
||||
from tacticalrmm.utils import get_default_timezone
|
||||
|
||||
from .models import AuditLog, PendingAction
|
||||
from .models import AuditLog, DebugLog, PendingAction
|
||||
|
||||
|
||||
class AuditLogSerializer(serializers.ModelSerializer):
|
||||
|
||||
entry_time = serializers.SerializerMethodField(read_only=True)
|
||||
ip_address = serializers.ReadOnlyField(source="debug_info.ip")
|
||||
|
||||
class Meta:
|
||||
model = AuditLog
|
||||
fields = "__all__"
|
||||
|
||||
def get_entry_time(self, log):
|
||||
timezone = get_default_timezone()
|
||||
return log.entry_time.astimezone(timezone).strftime("%m %d %Y %H:%M:%S")
|
||||
tz = self.context["default_tz"]
|
||||
return log.entry_time.astimezone(tz).strftime("%m %d %Y %H:%M:%S")
|
||||
|
||||
|
||||
class PendingActionSerializer(serializers.ModelSerializer):
|
||||
|
||||
hostname = serializers.ReadOnlyField(source="agent.hostname")
|
||||
salt_id = serializers.ReadOnlyField(source="agent.salt_id")
|
||||
client = serializers.ReadOnlyField(source="agent.client.name")
|
||||
@@ -30,3 +28,16 @@ class PendingActionSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = PendingAction
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class DebugLogSerializer(serializers.ModelSerializer):
|
||||
agent = serializers.ReadOnlyField(source="agent.hostname")
|
||||
entry_time = serializers.SerializerMethodField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = DebugLog
|
||||
fields = "__all__"
|
||||
|
||||
def get_entry_time(self, log):
|
||||
tz = self.context["default_tz"]
|
||||
return log.entry_time.astimezone(tz).strftime("%m %d %Y %H:%M:%S")
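The serializers now expect the display timezone via context instead of looking it up per call; the views build it once and pass it in, e.g.:

ctx = {"default_tz": get_default_timezone()}
DebugLogSerializer(logs, many=True, context=ctx).data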
|
||||
|
||||
25 api/tacticalrmm/logs/tasks.py Normal file
@@ -0,0 +1,25 @@
|
||||
from django.utils import timezone as djangotime

from tacticalrmm.celery import app


@app.task
def prune_debug_log(older_than_days: int) -> str:
    from .models import DebugLog

    DebugLog.objects.filter(
        entry_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
    ).delete()

    return "ok"


@app.task
def prune_audit_log(older_than_days: int) -> str:
    from .models import AuditLog

    AuditLog.objects.filter(
        entry_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
    ).delete()

    return "ok"
|
||||
@@ -1,10 +1,11 @@
|
||||
from datetime import datetime, timedelta
|
||||
from itertools import cycle
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker, seq
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from logs.models import PendingAction
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
|
||||
class TestAuditViews(TacticalTestCase):
|
||||
@@ -16,20 +17,23 @@ class TestAuditViews(TacticalTestCase):
|
||||
|
||||
# create clients for client filter
|
||||
site = baker.make("clients.Site")
|
||||
baker.make_recipe("agents.agent", site=site, hostname="AgentHostname1")
|
||||
agent1 = baker.make_recipe("agents.agent", site=site, hostname="AgentHostname1")
|
||||
agent2 = baker.make_recipe("agents.agent", hostname="AgentHostname2")
|
||||
agent0 = baker.make_recipe("agents.agent", hostname="AgentHostname")
|
||||
|
||||
# user jim agent logs
|
||||
baker.make_recipe(
|
||||
"logs.agent_logs",
|
||||
username="jim",
|
||||
agent="AgentHostname1",
|
||||
entry_time=seq(datetime.now(), timedelta(days=3)),
|
||||
agent_id=agent1.id,
|
||||
_quantity=15,
|
||||
)
|
||||
baker.make_recipe(
|
||||
"logs.agent_logs",
|
||||
username="jim",
|
||||
agent="AgentHostname2",
|
||||
entry_time=seq(datetime.now(), timedelta(days=100)),
|
||||
agent_id=agent2.id,
|
||||
_quantity=8,
|
||||
)
|
||||
|
||||
@@ -38,14 +42,14 @@ class TestAuditViews(TacticalTestCase):
|
||||
"logs.agent_logs",
|
||||
username="james",
|
||||
agent="AgentHostname1",
|
||||
entry_time=seq(datetime.now(), timedelta(days=55)),
|
||||
agent_id=agent1.id,
|
||||
_quantity=7,
|
||||
)
|
||||
baker.make_recipe(
|
||||
"logs.agent_logs",
|
||||
username="james",
|
||||
agent="AgentHostname2",
|
||||
entry_time=seq(datetime.now(), timedelta(days=20)),
|
||||
agent_id=agent2.id,
|
||||
_quantity=10,
|
||||
)
|
||||
|
||||
@@ -53,7 +57,7 @@ class TestAuditViews(TacticalTestCase):
|
||||
baker.make_recipe(
|
||||
"logs.agent_logs",
|
||||
agent=seq("AgentHostname"),
|
||||
entry_time=seq(datetime.now(), timedelta(days=29)),
|
||||
agent_id=seq(agent1.id),
|
||||
_quantity=5,
|
||||
)
|
||||
|
||||
@@ -61,7 +65,6 @@ class TestAuditViews(TacticalTestCase):
|
||||
baker.make_recipe(
|
||||
"logs.object_logs",
|
||||
username="james",
|
||||
entry_time=seq(datetime.now(), timedelta(days=5)),
|
||||
_quantity=17,
|
||||
)
|
||||
|
||||
@@ -69,7 +72,6 @@ class TestAuditViews(TacticalTestCase):
|
||||
baker.make_recipe(
|
||||
"logs.login_logs",
|
||||
username="james",
|
||||
entry_time=seq(datetime.now(), timedelta(days=7)),
|
||||
_quantity=11,
|
||||
)
|
||||
|
||||
@@ -77,51 +79,62 @@ class TestAuditViews(TacticalTestCase):
|
||||
baker.make_recipe(
|
||||
"logs.login_logs",
|
||||
username="jim",
|
||||
entry_time=seq(datetime.now(), timedelta(days=11)),
|
||||
_quantity=13,
|
||||
)
|
||||
|
||||
return site
|
||||
return {"site": site, "agents": [agent0, agent1, agent2]}
|
||||
|
||||
def test_get_audit_logs(self):
|
||||
url = "/logs/auditlogs/"
|
||||
|
||||
# create data
|
||||
site = self.create_audit_records()
|
||||
data = self.create_audit_records()
|
||||
|
||||
# test data and result counts
|
||||
data = [
|
||||
{"filter": {"timeFilter": 30}, "count": 86},
|
||||
{
|
||||
"filter": {"timeFilter": 45, "agentFilter": ["AgentHostname2"]},
|
||||
"filter": {
|
||||
"timeFilter": 45,
|
||||
"agentFilter": [data["agents"][2].id],
|
||||
},
|
||||
"count": 19,
|
||||
},
|
||||
{
|
||||
"filter": {"userFilter": ["jim"], "agentFilter": ["AgentHostname1"]},
|
||||
"filter": {
|
||||
"userFilter": ["jim"],
|
||||
"agentFilter": [data["agents"][1].id],
|
||||
},
|
||||
"count": 15,
|
||||
},
|
||||
{
|
||||
"filter": {
|
||||
"timeFilter": 180,
|
||||
"userFilter": ["james"],
|
||||
"agentFilter": ["AgentHostname1"],
|
||||
"agentFilter": [data["agents"][1].id],
|
||||
},
|
||||
"count": 7,
|
||||
},
|
||||
{"filter": {}, "count": 86},
|
||||
{"filter": {"agentFilter": ["DoesntExist"]}, "count": 0},
|
||||
{"filter": {"agentFilter": [500]}, "count": 0},
|
||||
{
|
||||
"filter": {
|
||||
"timeFilter": 35,
|
||||
"userFilter": ["james", "jim"],
|
||||
"agentFilter": ["AgentHostname1", "AgentHostname2"],
|
||||
"agentFilter": [
|
||||
data["agents"][1].id,
|
||||
data["agents"][2].id,
|
||||
],
|
||||
},
|
||||
"count": 40,
|
||||
},
|
||||
{"filter": {"timeFilter": 35, "userFilter": ["james", "jim"]}, "count": 81},
|
||||
{"filter": {"objectFilter": ["user"]}, "count": 26},
|
||||
{"filter": {"actionFilter": ["login"]}, "count": 12},
|
||||
{"filter": {"clientFilter": [site.client.id]}, "count": 23},
|
||||
{
|
||||
"filter": {"clientFilter": [data["site"].client.id]},
|
||||
"count": 23,
|
||||
},
|
||||
]
|
||||
|
||||
pagination = {
|
||||
@@ -137,45 +150,15 @@ class TestAuditViews(TacticalTestCase):
|
||||
)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(
|
||||
len(resp.data["audit_logs"]),
|
||||
len(resp.data["audit_logs"]), # type:ignore
|
||||
pagination["rowsPerPage"]
|
||||
if req["count"] > pagination["rowsPerPage"]
|
||||
else req["count"],
|
||||
)
|
||||
self.assertEqual(resp.data["total"], req["count"])
|
||||
self.assertEqual(resp.data["total"], req["count"]) # type:ignore
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_options_filter(self):
|
||||
url = "/logs/auditlogs/optionsfilter/"
|
||||
|
||||
baker.make_recipe("agents.agent", hostname=seq("AgentHostname"), _quantity=5)
|
||||
baker.make_recipe("agents.agent", hostname=seq("Server"), _quantity=3)
|
||||
baker.make("accounts.User", username=seq("Username"), _quantity=7)
|
||||
baker.make("accounts.User", username=seq("soemthing"), _quantity=3)
|
||||
|
||||
data = [
|
||||
{"req": {"type": "agent", "pattern": "AgeNt"}, "count": 5},
|
||||
{"req": {"type": "agent", "pattern": "AgentHostname1"}, "count": 1},
|
||||
{"req": {"type": "agent", "pattern": "hasjhd"}, "count": 0},
|
||||
{"req": {"type": "user", "pattern": "UsEr"}, "count": 7},
|
||||
{"req": {"type": "user", "pattern": "UserName1"}, "count": 1},
|
||||
{"req": {"type": "user", "pattern": "dfdsadf"}, "count": 0},
|
||||
]
|
||||
|
||||
for req in data:
|
||||
resp = self.client.post(url, req["req"], format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), req["count"])
|
||||
|
||||
# test for invalid payload. needs to have either type: user or agent
|
||||
invalid_data = {"type": "object", "pattern": "SomeString"}
|
||||
|
||||
resp = self.client.post(url, invalid_data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_get_pending_actions(self):
|
||||
url = "/logs/pendingactions/"
|
||||
agent1 = baker.make_recipe("agents.online_agent")
|
||||
@@ -270,3 +253,87 @@ class TestAuditViews(TacticalTestCase):
|
||||
self.assertEqual(r.data, "error deleting sched task") # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_get_debug_log(self):
|
||||
url = "/logs/debuglog/"
|
||||
|
||||
# create data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
baker.make(
|
||||
"logs.DebugLog",
|
||||
log_level=cycle(["error", "info", "warning", "critical"]),
|
||||
log_type="agent_issues",
|
||||
agent=agent,
|
||||
_quantity=4,
|
||||
)
|
||||
|
||||
logs = baker.make(
|
||||
"logs.DebugLog",
|
||||
log_type="system_issues",
|
||||
log_level=cycle(["error", "info", "warning", "critical"]),
|
||||
_quantity=15,
|
||||
)
|
||||
|
||||
# test agent filter
|
||||
data = {"agentFilter": agent.id}
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 4) # type: ignore
|
||||
|
||||
# test log type filter and agent
|
||||
data = {"agentFilter": agent.id, "logLevelFilter": "warning"}
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 1) # type: ignore
|
||||
|
||||
# test time filter with other
|
||||
data = {"logTypeFilter": "system_issues", "logLevelFilter": "error"}
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 4) # type: ignore
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
|
||||
class TestLogTasks(TacticalTestCase):
|
||||
def test_prune_debug_log(self):
|
||||
from .models import DebugLog
|
||||
from .tasks import prune_debug_log
|
||||
|
||||
# setup data
|
||||
debug_log = baker.make(
|
||||
"logs.DebugLog",
|
||||
_quantity=50,
|
||||
)
|
||||
|
||||
days = 0
|
||||
for item in debug_log: # type:ignore
|
||||
item.entry_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
item.save()
|
||||
days = days + 5
|
||||
|
||||
# delete AgentHistory older than 30 days
|
||||
prune_debug_log(30)
|
||||
|
||||
self.assertEqual(DebugLog.objects.count(), 6)
|
||||
|
||||
def test_prune_audit_log(self):
|
||||
from .models import AuditLog
|
||||
from .tasks import prune_audit_log
|
||||
|
||||
# setup data
|
||||
audit_log = baker.make(
|
||||
"logs.AuditLog",
|
||||
_quantity=50,
|
||||
)
|
||||
|
||||
days = 0
|
||||
for item in audit_log: # type:ignore
|
||||
item.entry_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
item.save()
|
||||
days = days + 5
|
||||
|
||||
# delete AgentHistory older than 30 days
|
||||
prune_audit_log(30)
|
||||
|
||||
self.assertEqual(AuditLog.objects.count(), 6)
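Why 6 rows survive in both tests: the 50 entries are back-dated in 5-day steps (0, 5, 10, ..., 245 days old), and pruning at 30 days removes everything at or beyond the 30-day mark (the 30-day row falls just under the cutoff because the cutoff is computed a moment after the rows were stamped), leaving only the rows aged 0 through 25 days.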
|
||||
|
||||
@@ -5,7 +5,5 @@ from . import views
|
||||
urlpatterns = [
|
||||
path("pendingactions/", views.PendingActions.as_view()),
|
||||
path("auditlogs/", views.GetAuditLogs.as_view()),
|
||||
path("auditlogs/optionsfilter/", views.FilterOptionsAuditLog.as_view()),
|
||||
path("debuglog/<mode>/<hostname>/<order>/", views.debug_log),
|
||||
path("downloadlog/", views.download_log),
|
||||
path("debuglog/", views.GetDebugLog.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,28 +1,23 @@
|
||||
import asyncio
|
||||
import subprocess
|
||||
from datetime import datetime as dt
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.paginator import Paginator
|
||||
from django.db.models import Q
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.models import User
|
||||
from accounts.serializers import UserSerializer
|
||||
from agents.models import Agent
|
||||
from agents.serializers import AgentHostnameSerializer
|
||||
from tacticalrmm.utils import notify_error
|
||||
from django.core.paginator import Paginator
|
||||
from django.db.models import Q
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework import status
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from tacticalrmm.utils import notify_error, get_default_timezone
|
||||
|
||||
from .models import AuditLog, PendingAction
|
||||
from .models import AuditLog, PendingAction, DebugLog
|
||||
from .permissions import AuditLogPerms, DebugLogPerms, ManagePendingActionPerms
|
||||
from .serializers import AuditLogSerializer, PendingActionSerializer
|
||||
from .serializers import AuditLogSerializer, DebugLogSerializer, PendingActionSerializer
|
||||
|
||||
|
||||
class GetAuditLogs(APIView):
|
||||
@@ -48,7 +43,7 @@ class GetAuditLogs(APIView):
|
||||
timeFilter = Q()
|
||||
|
||||
if "agentFilter" in request.data:
|
||||
agentFilter = Q(agent__in=request.data["agentFilter"])
|
||||
agentFilter = Q(agent_id__in=request.data["agentFilter"])
|
||||
|
||||
elif "clientFilter" in request.data:
|
||||
clients = Client.objects.filter(
|
||||
@@ -84,36 +79,18 @@ class GetAuditLogs(APIView):
|
||||
).order_by(order_by)
|
||||
|
||||
paginator = Paginator(audit_logs, pagination["rowsPerPage"])
|
||||
ctx = {"default_tz": get_default_timezone()}
|
||||
|
||||
return Response(
|
||||
{
|
||||
"audit_logs": AuditLogSerializer(
|
||||
paginator.get_page(pagination["page"]), many=True
|
||||
paginator.get_page(pagination["page"]), many=True, context=ctx
|
||||
).data,
|
||||
"total": paginator.count,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class FilterOptionsAuditLog(APIView):
|
||||
permission_classes = [IsAuthenticated, AuditLogPerms]
|
||||
|
||||
def post(self, request):
|
||||
if request.data["type"] == "agent":
|
||||
agents = Agent.objects.filter(hostname__icontains=request.data["pattern"])
|
||||
return Response(AgentHostnameSerializer(agents, many=True).data)
|
||||
|
||||
if request.data["type"] == "user":
|
||||
users = User.objects.filter(
|
||||
username__icontains=request.data["pattern"],
|
||||
agent=None,
|
||||
is_installer_user=False,
|
||||
)
|
||||
return Response(UserSerializer(users, many=True).data)
|
||||
|
||||
return Response("error", status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
class PendingActions(APIView):
|
||||
permission_classes = [IsAuthenticated, ManagePendingActionPerms]
|
||||
|
||||
@@ -158,60 +135,30 @@ class PendingActions(APIView):
|
||||
return Response(f"{action.agent.hostname}: {action.description} was cancelled")
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([IsAuthenticated, DebugLogPerms])
|
||||
def debug_log(request, mode, hostname, order):
|
||||
log_file = settings.LOG_CONFIG["handlers"][0]["sink"]
|
||||
class GetDebugLog(APIView):
|
||||
permission_classes = [IsAuthenticated, DebugLogPerms]
|
||||
|
||||
agents = Agent.objects.prefetch_related("site").only("pk", "hostname")
|
||||
agent_hostnames = AgentHostnameSerializer(agents, many=True)
|
||||
def patch(self, request):
|
||||
agentFilter = Q()
|
||||
logTypeFilter = Q()
|
||||
logLevelFilter = Q()
|
||||
|
||||
switch_mode = {
|
||||
"info": "INFO",
|
||||
"critical": "CRITICAL",
|
||||
"error": "ERROR",
|
||||
"warning": "WARNING",
|
||||
}
|
||||
level = switch_mode.get(mode, "INFO")
|
||||
if "logTypeFilter" in request.data:
|
||||
logTypeFilter = Q(log_type=request.data["logTypeFilter"])
|
||||
|
||||
if hostname == "all" and order == "latest":
|
||||
cmd = f"grep -h {level} {log_file} | tac"
|
||||
elif hostname == "all" and order == "oldest":
|
||||
cmd = f"grep -h {level} {log_file}"
|
||||
elif hostname != "all" and order == "latest":
|
||||
cmd = f"grep {hostname} {log_file} | grep -h {level} | tac"
|
||||
elif hostname != "all" and order == "oldest":
|
||||
cmd = f"grep {hostname} {log_file} | grep -h {level}"
|
||||
else:
|
||||
return Response("error", status=status.HTTP_400_BAD_REQUEST)
|
||||
if "logLevelFilter" in request.data:
|
||||
logLevelFilter = Q(log_level=request.data["logLevelFilter"])
|
||||
|
||||
contents = subprocess.run(
|
||||
cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
universal_newlines=True,
|
||||
shell=True,
|
||||
)
|
||||
if "agentFilter" in request.data:
|
||||
agentFilter = Q(agent=request.data["agentFilter"])
|
||||
|
||||
if not contents.stdout:
|
||||
resp = f"No {mode} logs"
|
||||
else:
|
||||
resp = contents.stdout
|
||||
debug_logs = (
|
||||
DebugLog.objects.prefetch_related("agent")
|
||||
.filter(logLevelFilter)
|
||||
.filter(agentFilter)
|
||||
.filter(logTypeFilter)
|
||||
)
|
||||
|
||||
return Response({"log": resp, "agents": agent_hostnames.data})
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([IsAuthenticated, DebugLogPerms])
|
||||
def download_log(request):
|
||||
log_file = settings.LOG_CONFIG["handlers"][0]["sink"]
|
||||
if settings.DEBUG:
|
||||
with open(log_file, "rb") as f:
|
||||
response = HttpResponse(f.read(), content_type="text/plain")
|
||||
response["Content-Disposition"] = "attachment; filename=debug.log"
|
||||
return response
|
||||
else:
|
||||
response = HttpResponse()
|
||||
response["Content-Disposition"] = "attachment; filename=debug.log"
|
||||
response["X-Accel-Redirect"] = "/private/log/debug.log"
|
||||
return response
|
||||
ctx = {"default_tz": get_default_timezone()}
|
||||
ret = DebugLogSerializer(debug_logs, many=True, context=ctx).data
|
||||
return Response(ret)
|
||||
|
||||
@@ -1,21 +1,22 @@
|
||||
asgiref==3.3.4
|
||||
asgiref==3.4.1
|
||||
asyncio-nats-client==0.11.4
|
||||
celery==5.1.1
|
||||
celery==5.1.2
|
||||
certifi==2021.5.30
|
||||
cffi==1.14.5
|
||||
channels==3.0.3
|
||||
channels_redis==3.2.0
|
||||
cffi==1.14.6
|
||||
channels==3.0.4
|
||||
channels_redis==3.3.0
|
||||
chardet==4.0.0
|
||||
cryptography==3.4.7
|
||||
cryptography==3.4.8
|
||||
daphne==3.0.2
|
||||
Django==3.2.4
|
||||
django-cors-headers==3.7.0
|
||||
Django==3.2.7
|
||||
django-cors-headers==3.8.0
|
||||
django-ipware==3.0.2
|
||||
django-rest-knox==4.1.0
|
||||
djangorestframework==3.12.4
|
||||
future==0.18.2
|
||||
loguru==0.5.3
|
||||
msgpack==1.0.2
|
||||
packaging==20.9
|
||||
packaging==21.0
|
||||
psycopg2-binary==2.9.1
|
||||
pycparser==2.20
|
||||
pycryptodome==3.10.1
|
||||
@@ -24,13 +25,13 @@ pyparsing==2.4.7
|
||||
pytz==2021.1
|
||||
qrcode==6.1
|
||||
redis==3.5.3
|
||||
requests==2.25.1
|
||||
requests==2.26.0
|
||||
six==1.16.0
|
||||
sqlparse==0.4.1
|
||||
twilio==6.60.0
|
||||
urllib3==1.26.5
|
||||
twilio==6.63.1
|
||||
urllib3==1.26.6
|
||||
uWSGI==2.0.19.1
|
||||
validators==0.18.2
|
||||
vine==5.0.0
|
||||
websockets==9.1
|
||||
zipp==3.4.1
|
||||
zipp==3.5.0
|
||||
|
||||
@@ -175,11 +175,29 @@
|
||||
"name": "Screenconnect - Get GUID for client",
|
||||
"description": "Returns Screenconnect GUID for client - Use with Custom Fields for later use. ",
|
||||
"args": [
|
||||
"-serviceName {{client.ScreenConnectService}}"
|
||||
"{{client.ScreenConnectService}}"
|
||||
],
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Collectors"
|
||||
},
|
||||
{
|
||||
"guid": "9cfdfe8f-82bf-4081-a59f-576d694f4649",
|
||||
"filename": "Win_Teamviewer_Get_ID.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "TeamViewer - Get ClientID for client",
|
||||
"description": "Returns Teamviwer ClientID for client - Use with Custom Fields for later use. ",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Collectors"
|
||||
},
|
||||
{
|
||||
"guid": "e43081d4-6f71-4ce3-881a-22da749f7a57",
|
||||
"filename": "Win_AnyDesk_Get_Anynet_ID.ps1",
|
||||
"submittedBy": "https://github.com/meuchels",
|
||||
"name": "AnyDesk - Get AnyNetID for client",
|
||||
"description": "Returns AnyNetID for client - Use with Custom Fields for later use. ",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Collectors"
|
||||
},
|
||||
{
|
||||
"guid": "95a2ee6f-b89b-4551-856e-3081b041caa7",
|
||||
"filename": "Win_Power_Profile_Reset_High_Performance_to_Defaults.ps1",
|
||||
@@ -226,6 +244,30 @@
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"guid": "907652a5-9ec1-4759-9871-a7743f805ff2",
|
||||
"filename": "Win_Software_Uninstall.ps1",
|
||||
"submittedBy": "https://github.com/subzdev",
|
||||
"name": "Software Uninstaller - list, find, and uninstall most software",
|
||||
"description": "Allows listing, finding and uninstalling most software on Windows. There will be a best effort to uninstall silently if the silent uninstall string is not provided.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software",
|
||||
"default_timeout": "600"
|
||||
},
|
||||
{
|
||||
"guid": "64c3b1a8-c85f-4800-85a3-485f78a2d9ad",
|
||||
"filename": "Win_Bitdefender_GravityZone_Install.ps1",
|
||||
"submittedBy": "https://github.com/jhtechIL/",
|
||||
"name": "BitDefender Gravity Zone Install",
|
||||
"description": "Installs BitDefender Gravity Zone, requires client custom field setup. See script comments for details",
|
||||
"args": [
|
||||
"-url {{client.bdurl}}",
|
||||
"-exe {{client.bdexe}}"
|
||||
],
|
||||
"default_timeout": "2500",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"guid": "da51111c-aff6-4d87-9d76-0608e1f67fe5",
|
||||
"filename": "Win_Defender_Enable.ps1",
|
||||
@@ -254,6 +296,16 @@
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Windows Features"
|
||||
},
|
||||
{
|
||||
"guid": "0afd8d00-b95b-4318-8d07-0b9bc4424287",
|
||||
"filename": "Win_Feature_NET35_Enable.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Windows Feature - Enable .NET 3.5",
|
||||
"description": "Enables the Windows .NET 3.5 Framework in Turn Features on and off",
|
||||
"shell": "powershell",
|
||||
"default_timeout": "300",
|
||||
"category": "TRMM (Win):Windows Features"
|
||||
},
|
||||
{
|
||||
"guid": "24f19ead-fdfe-46b4-9dcb-4cd0e12a3940",
|
||||
"filename": "Win_Speedtest.ps1",
|
||||
@@ -368,14 +420,14 @@
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "5615aa90-0272-427b-8acf-0ca019612501",
|
||||
"filename": "Win_Chocolatey_Update_Installed.bat",
|
||||
"guid": "6c78eb04-57ae-43b0-98ed-cbd3ef9e2f80",
|
||||
"filename": "Win_Chocolatey_Manage_Apps_Bulk.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Update Installed Apps",
|
||||
"description": "Update all apps that were installed using Chocolatey.",
|
||||
"shell": "cmd",
|
||||
"name": "Chocolatey - Install, Uninstall and Upgrade Software",
|
||||
"description": "This script installs, uninstalls and updates software using Chocolatey with logic to slow tasks to minimize hitting community limits. Mode install/uninstall/upgrade Hosts x",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software>Chocolatey",
|
||||
"default_timeout": "3600"
|
||||
"default_timeout": "600"
|
||||
},
|
||||
{
|
||||
"guid": "fff8024d-d72e-4457-84fa-6c780f69a16f",
|
||||
@@ -450,6 +502,16 @@
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"guid": "93038ae0-58ce-433e-a3b9-bc99ad1ea79a",
|
||||
"filename": "Win_Services_AutomaticStartup_Running.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Ensure all services with startup type Automatic are running",
|
||||
"description": "Gets a list of all service with startup type of Automatic but aren't running and tries to start them",
|
||||
"shell": "powershell",
|
||||
"default_timeout": "300",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"guid": "e09895d5-ca13-44a2-a38c-6e77c740f0e8",
|
||||
"filename": "Win_ScreenConnectAIO.ps1",
|
||||
@@ -507,6 +569,16 @@
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "7c0c7e37-60ff-462f-9c34-b5cd4c4796a7",
|
||||
"filename": "Win_Wifi_SSID_and_Password_Retrieval.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Network Wireless - Retrieve Saved passwords",
|
||||
"description": "Returns all saved wifi passwords stored on the computer",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "abe78170-7cf9-435b-9666-c5ef6c11a106",
|
||||
"filename": "Win_Network_IPv6_Disable.ps1",
|
||||
@@ -527,6 +599,16 @@
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "5676acca-44e5-46c8-af61-ae795ecb3ef1",
|
||||
"filename": "Win_Network_IP_DHCP_Renew.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Network - Release and Renew IP",
|
||||
"description": "Trigger and release and renew of IP address on all network adapters",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "83aa4d51-63ce-41e7-829f-3c16e6115bbf",
|
||||
"filename": "Win_Network_DNS_Set_to_1.1.1.2.ps1",
|
||||
@@ -557,6 +639,16 @@
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "43e65e5f-717a-4b6d-a724-1a86229fcd42",
|
||||
"filename": "Win_Activation_Check.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Windows Activation check",
|
||||
"description": "Checks to see if windows is activated and returns status",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "120"
|
||||
},
|
||||
{
|
||||
"guid": "83f6c6ea-6120-4fd3-bec8-d3abc505dcdf",
|
||||
"filename": "Win_TRMM_Start_Menu_Delete_Shortcut.ps1",
|
||||
|
||||
api/tacticalrmm/scripts/migrations/0009_scriptsnippet.py (new file, 22 lines)
@@ -0,0 +1,22 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 19:25
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('scripts', '0008_script_guid'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='ScriptSnippet',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(max_length=40)),
|
||||
('code', models.TextField()),
|
||||
('shell', models.CharField(choices=[('powershell', 'Powershell'), ('cmd', 'Batch (CMD)'), ('python', 'Python')], max_length=15)),
|
||||
],
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,33 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-26 16:34
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('scripts', '0009_scriptsnippet'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='scriptsnippet',
|
||||
name='desc',
|
||||
field=models.CharField(blank=True, max_length=50, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='script',
|
||||
name='code_base64',
|
||||
field=models.TextField(blank=True, default='', null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='script',
|
||||
name='description',
|
||||
field=models.TextField(blank=True, default='', null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='scriptsnippet',
|
||||
name='name',
|
||||
field=models.CharField(max_length=40, unique=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-31 17:07
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('scripts', '0010_auto_20210726_1634'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='scriptsnippet',
|
||||
name='code',
|
||||
field=models.TextField(default=''),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='scriptsnippet',
|
||||
name='desc',
|
||||
field=models.CharField(blank=True, default='', max_length=50),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='scriptsnippet',
|
||||
name='shell',
|
||||
field=models.CharField(choices=[('powershell', 'Powershell'), ('cmd', 'Batch (CMD)'), ('python', 'Python')], default='powershell', max_length=15),
|
||||
),
|
||||
]
|
||||
@@ -1,12 +1,10 @@
|
||||
import base64
|
||||
import re
|
||||
from typing import List, Optional
|
||||
from typing import List
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from loguru import logger
|
||||
|
||||
from django.db.models.fields import CharField, TextField
|
||||
from logs.models import BaseAuditModel
|
||||
from tacticalrmm.utils import replace_db_values
|
||||
|
||||
@@ -21,13 +19,11 @@ SCRIPT_TYPES = [
|
||||
("builtin", "Built In"),
|
||||
]
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class Script(BaseAuditModel):
|
||||
guid = name = models.CharField(max_length=64, null=True, blank=True)
|
||||
guid = models.CharField(max_length=64, null=True, blank=True)
|
||||
name = models.CharField(max_length=255)
|
||||
description = models.TextField(null=True, blank=True)
|
||||
description = models.TextField(null=True, blank=True, default="")
|
||||
filename = models.CharField(max_length=255) # deprecated
|
||||
shell = models.CharField(
|
||||
max_length=100, choices=SCRIPT_SHELLS, default="powershell"
|
||||
@@ -43,20 +39,44 @@ class Script(BaseAuditModel):
|
||||
)
|
||||
favorite = models.BooleanField(default=False)
|
||||
category = models.CharField(max_length=100, null=True, blank=True)
|
||||
code_base64 = models.TextField(null=True, blank=True)
|
||||
code_base64 = models.TextField(null=True, blank=True, default="")
|
||||
default_timeout = models.PositiveIntegerField(default=90)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def code(self):
|
||||
def code_no_snippets(self):
|
||||
if self.code_base64:
|
||||
base64_bytes = self.code_base64.encode("ascii", "ignore")
|
||||
return base64.b64decode(base64_bytes).decode("ascii", "ignore")
|
||||
return base64.b64decode(self.code_base64.encode("ascii", "ignore")).decode(
|
||||
"ascii", "ignore"
|
||||
)
|
||||
else:
|
||||
return ""
|
||||
|
||||
@property
|
||||
def code(self):
|
||||
return self.replace_with_snippets(self.code_no_snippets)
|
||||
|
||||
@classmethod
|
||||
def replace_with_snippets(cls, code):
|
||||
# check if snippet has been added to script body
|
||||
matches = re.finditer(r"{{(.*)}}", code)
|
||||
if matches:
|
||||
replaced_code = code
|
||||
for snippet in matches:
|
||||
snippet_name = snippet.group(1).strip()
|
||||
if ScriptSnippet.objects.filter(name=snippet_name).exists():
|
||||
value = ScriptSnippet.objects.get(name=snippet_name).code
|
||||
else:
|
||||
value = ""
|
||||
|
||||
replaced_code = re.sub(snippet.group(), value, replaced_code)
|
||||
|
||||
return replaced_code
|
||||
else:
|
||||
return code
|
||||
|
||||
@classmethod
|
||||
def load_community_scripts(cls):
|
||||
import json
|
||||
@@ -97,20 +117,20 @@ class Script(BaseAuditModel):
|
||||
|
||||
if s.exists():
|
||||
i = s.first()
|
||||
i.name = script["name"]
|
||||
i.description = script["description"]
|
||||
i.category = category
|
||||
i.shell = script["shell"]
|
||||
i.default_timeout = default_timeout
|
||||
i.args = args
|
||||
i.name = script["name"] # type: ignore
|
||||
i.description = script["description"] # type: ignore
|
||||
i.category = category # type: ignore
|
||||
i.shell = script["shell"] # type: ignore
|
||||
i.default_timeout = default_timeout # type: ignore
|
||||
i.args = args # type: ignore
|
||||
|
||||
with open(os.path.join(scripts_dir, script["filename"]), "rb") as f:
|
||||
script_bytes = (
|
||||
f.read().decode("utf-8").encode("ascii", "ignore")
|
||||
)
|
||||
i.code_base64 = base64.b64encode(script_bytes).decode("ascii")
|
||||
i.code_base64 = base64.b64encode(script_bytes).decode("ascii") # type: ignore
|
||||
|
||||
i.save(
|
||||
i.save( # type: ignore
|
||||
update_fields=[
|
||||
"name",
|
||||
"description",
|
||||
@@ -175,7 +195,6 @@ class Script(BaseAuditModel):
|
||||
guid=script["guid"],
|
||||
name=script["name"],
|
||||
description=script["description"],
|
||||
filename=script["filename"],
|
||||
shell=script["shell"],
|
||||
script_type="builtin",
|
||||
category=category,
|
||||
@@ -209,7 +228,7 @@ class Script(BaseAuditModel):
|
||||
if match:
|
||||
# only get the match between the () in regex
|
||||
string = match.group(1)
|
||||
value = replace_db_values(string=string, agent=agent, shell=shell)
|
||||
value = replace_db_values(string=string, instance=agent, shell=shell)
|
||||
|
||||
if value:
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg))
|
||||
@@ -221,3 +240,13 @@ class Script(BaseAuditModel):
|
||||
temp_args.append(arg)
|
||||
|
||||
return temp_args
|
||||
|
||||
|
||||
class ScriptSnippet(models.Model):
|
||||
name = CharField(max_length=40, unique=True)
|
||||
desc = CharField(max_length=50, blank=True, default="")
|
||||
code = TextField(default="")
|
||||
shell = CharField(max_length=15, choices=SCRIPT_SHELLS, default="powershell")
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
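A minimal sketch of how the new ScriptSnippet expansion behaves, based on the replace_with_snippets code above; the snippet name "ping_google" and its code are made-up values for illustration only:

    # hypothetical usage; "ping_google" is an assumed snippet name
    from scripts.models import Script, ScriptSnippet

    ScriptSnippet.objects.create(
        name="ping_google", shell="powershell", code="ping google.com"
    )

    body = "Write-Output 'start'\n{{ping_google}}\nWrite-Output 'done'"
    print(Script.replace_with_snippets(body))
    # the {{ping_google}} placeholder is replaced with "ping google.com";
    # unknown snippet names are replaced with an empty string

The same expansion is applied through the Script.code property, while code_no_snippets returns the stored body untouched.
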
@@ -1,6 +1,6 @@
|
||||
from rest_framework.serializers import ModelSerializer, ReadOnlyField
|
||||
|
||||
from .models import Script
|
||||
from .models import Script, ScriptSnippet
|
||||
|
||||
|
||||
class ScriptTableSerializer(ModelSerializer):
|
||||
@@ -41,3 +41,9 @@ class ScriptCheckSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Script
|
||||
fields = ["code", "shell"]
|
||||
|
||||
|
||||
class ScriptSnippetSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = ScriptSnippet
|
||||
fields = "__all__"
|
||||
|
||||
@@ -1,12 +1,16 @@
|
||||
import asyncio
|
||||
|
||||
from agents.models import Agent
|
||||
from packaging import version as pyver
|
||||
|
||||
from agents.models import Agent, AgentHistory
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
|
||||
@app.task
|
||||
def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None:
|
||||
def handle_bulk_command_task(
|
||||
agentpks, cmd, shell, timeout, username, run_on_offline=False
|
||||
) -> None:
|
||||
nats_data = {
|
||||
"func": "rawcmd",
|
||||
"timeout": timeout,
|
||||
@@ -16,11 +20,31 @@ def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None:
|
||||
},
|
||||
}
|
||||
for agent in Agent.objects.filter(pk__in=agentpks):
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type="cmd_run",
|
||||
command=cmd,
|
||||
username=username,
|
||||
)
|
||||
nats_data["id"] = hist.pk
|
||||
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
|
||||
|
||||
@app.task
|
||||
def handle_bulk_script_task(scriptpk, agentpks, args, timeout) -> None:
|
||||
def handle_bulk_script_task(scriptpk, agentpks, args, timeout, username) -> None:
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
for agent in Agent.objects.filter(pk__in=agentpks):
|
||||
agent.run_script(scriptpk=script.pk, args=args, timeout=timeout)
|
||||
history_pk = 0
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type="script_run",
|
||||
script=script,
|
||||
username=username,
|
||||
)
|
||||
history_pk = hist.pk
|
||||
agent.run_script(
|
||||
scriptpk=script.pk, args=args, timeout=timeout, history_pk=history_pk
|
||||
)
|
||||
|
||||
@@ -1,15 +1,18 @@
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.files.uploadedfile import SimpleUploadedFile
|
||||
from model_bakery import baker
|
||||
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import Script
|
||||
from .serializers import ScriptSerializer, ScriptTableSerializer
|
||||
from .models import Script, ScriptSnippet
|
||||
from .serializers import (
|
||||
ScriptSerializer,
|
||||
ScriptTableSerializer,
|
||||
ScriptSnippetSerializer,
|
||||
)
|
||||
|
||||
|
||||
class TestScriptViews(TacticalTestCase):
|
||||
@@ -18,7 +21,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
self.authenticate()
|
||||
|
||||
def test_get_scripts(self):
|
||||
url = "/scripts/scripts/"
|
||||
url = "/scripts/"
|
||||
scripts = baker.make("scripts.Script", _quantity=3)
|
||||
|
||||
serializer = ScriptTableSerializer(scripts, many=True)
|
||||
@@ -29,14 +32,14 @@ class TestScriptViews(TacticalTestCase):
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_script(self):
|
||||
url = f"/scripts/scripts/"
|
||||
url = f"/scripts/"
|
||||
|
||||
data = {
|
||||
"name": "Name",
|
||||
"description": "Description",
|
||||
"shell": "powershell",
|
||||
"category": "New",
|
||||
"code": "Some Test Code\nnew Line",
|
||||
"code_base64": "VGVzdA==", # Test
|
||||
"default_timeout": 99,
|
||||
"args": ["hello", "world", r"{{agent.public_ip}}"],
|
||||
"favorite": False,
|
||||
@@ -46,47 +49,24 @@ class TestScriptViews(TacticalTestCase):
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(Script.objects.filter(name="Name").exists())
|
||||
self.assertEqual(Script.objects.get(name="Name").code, data["code"])
|
||||
|
||||
# test with file upload
|
||||
# file with 'Test' as content
|
||||
file = SimpleUploadedFile(
|
||||
"test_script.bat", b"\x54\x65\x73\x74", content_type="text/plain"
|
||||
)
|
||||
data = {
|
||||
"name": "New Name",
|
||||
"description": "Description",
|
||||
"shell": "cmd",
|
||||
"category": "New",
|
||||
"filename": file,
|
||||
"default_timeout": 4455,
|
||||
"args": json.dumps(
|
||||
["hello", "world", r"{{agent.public_ip}}"]
|
||||
), # simulate javascript's JSON.stringify() for formData
|
||||
}
|
||||
|
||||
# test with file upload
|
||||
resp = self.client.post(url, data, format="multipart")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
script = Script.objects.filter(name="New Name").first()
|
||||
self.assertEquals(script.code, "Test")
|
||||
self.assertEqual(Script.objects.get(name="Name").code, "Test")
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_modify_script(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.put("/scripts/500/script/", format="json")
|
||||
resp = self.client.put("/scripts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# make a userdefined script
|
||||
script = baker.make_recipe("scripts.script")
|
||||
url = f"/scripts/{script.pk}/script/"
|
||||
url = f"/scripts/{script.pk}/"
|
||||
|
||||
data = {
|
||||
"name": script.name,
|
||||
"description": "Description Change",
|
||||
"shell": script.shell,
|
||||
"code": "Test Code\nAnother Line",
|
||||
"code_base64": "VGVzdA==", # Test
|
||||
"default_timeout": 13344556,
|
||||
}
|
||||
|
||||
@@ -95,16 +75,18 @@ class TestScriptViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
script = Script.objects.get(pk=script.pk)
|
||||
self.assertEquals(script.description, "Description Change")
|
||||
self.assertEquals(script.code, "Test Code\nAnother Line")
|
||||
self.assertEquals(script.code, "Test")
|
||||
|
||||
# test edit a builtin script
|
||||
|
||||
data = {"name": "New Name", "description": "New Desc", "code": "Some New Code"}
|
||||
data = {
|
||||
"name": "New Name",
|
||||
"description": "New Desc",
|
||||
"code_base64": "VGVzdA==",
|
||||
} # Test
|
||||
builtin_script = baker.make_recipe("scripts.script", script_type="builtin")
|
||||
|
||||
resp = self.client.put(
|
||||
f"/scripts/{builtin_script.pk}/script/", data, format="json"
|
||||
)
|
||||
resp = self.client.put(f"/scripts/{builtin_script.pk}/", data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
data = {
|
||||
@@ -112,13 +94,11 @@ class TestScriptViews(TacticalTestCase):
|
||||
"description": "Description Change",
|
||||
"shell": script.shell,
|
||||
"favorite": True,
|
||||
"code": "Test Code\nAnother Line",
|
||||
"code_base64": "VGVzdA==", # Test
|
||||
"default_timeout": 54345,
|
||||
}
|
||||
# test marking a builtin script as favorite
|
||||
resp = self.client.put(
|
||||
f"/scripts/{builtin_script.pk}/script/", data, format="json"
|
||||
)
|
||||
resp = self.client.put(f"/scripts/{builtin_script.pk}/", data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(Script.objects.get(pk=builtin_script.pk).favorite)
|
||||
|
||||
@@ -126,11 +106,11 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
def test_get_script(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.get("/scripts/500/script/", format="json")
|
||||
resp = self.client.get("/scripts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
script = baker.make("scripts.Script")
|
||||
url = f"/scripts/{script.pk}/script/" # type: ignore
|
||||
url = f"/scripts/{script.pk}/" # type: ignore
|
||||
serializer = ScriptSerializer(script)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -138,14 +118,34 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_test_script(self, run_script):
|
||||
url = "/scripts/testscript/"
|
||||
|
||||
run_script.return_value = "return value"
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
data = {
|
||||
"agent": agent.pk,
|
||||
"code": "some_code",
|
||||
"timeout": 90,
|
||||
"args": [],
|
||||
"shell": "powershell",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, "return value") # type: ignore
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_delete_script(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.delete("/scripts/500/script/", format="json")
|
||||
resp = self.client.delete("/scripts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test delete script
|
||||
script = baker.make_recipe("scripts.script")
|
||||
url = f"/scripts/{script.pk}/script/"
|
||||
url = f"/scripts/{script.pk}/"
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
@@ -153,7 +153,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
# test delete community script
|
||||
script = baker.make_recipe("scripts.script", script_type="builtin")
|
||||
url = f"/scripts/{script.pk}/script/"
|
||||
url = f"/scripts/{script.pk}/"
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
@@ -161,7 +161,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
def test_download_script(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.get("/scripts/500/download/", format="json")
|
||||
resp = self.client.get("/scripts/download/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# return script code property should be "Test"
|
||||
@@ -170,7 +170,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
script = baker.make(
|
||||
"scripts.Script", code_base64="VGVzdA==", shell="powershell"
|
||||
)
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
url = f"/scripts/download/{script.pk}/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -178,7 +178,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
# test batch file
|
||||
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="cmd")
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
url = f"/scripts/download/{script.pk}/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -186,7 +186,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
# test python file
|
||||
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="python")
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
url = f"/scripts/download/{script.pk}/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -497,3 +497,106 @@ class TestScriptViews(TacticalTestCase):
|
||||
["-Parameter", "-Another $True"],
|
||||
Script.parse_script_args(agent=agent, shell="powershell", args=args),
|
||||
)
|
||||
|
||||
|
||||
class TestScriptSnippetViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
self.authenticate()
|
||||
|
||||
def test_get_script_snippets(self):
|
||||
url = "/scripts/snippets/"
|
||||
snippets = baker.make("scripts.ScriptSnippet", _quantity=3)
|
||||
|
||||
serializer = ScriptSnippetSerializer(snippets, many=True)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(serializer.data, resp.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_script_snippet(self):
|
||||
url = f"/scripts/snippets/"
|
||||
|
||||
data = {
|
||||
"name": "Name",
|
||||
"description": "Description",
|
||||
"shell": "powershell",
|
||||
"code": "Test",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(ScriptSnippet.objects.filter(name="Name").exists())
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_modify_script_snippet(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.put("/scripts/snippets/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# make a userdefined script
|
||||
snippet = baker.make("scripts.ScriptSnippet", name="Test")
|
||||
url = f"/scripts/snippets/{snippet.pk}/" # type: ignore
|
||||
|
||||
data = {"name": "New Name"} # type: ignore
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
snippet = ScriptSnippet.objects.get(pk=snippet.pk) # type: ignore
|
||||
self.assertEquals(snippet.name, "New Name")
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_get_script_snippet(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.get("/scripts/snippets/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
snippet = baker.make("scripts.ScriptSnippet")
|
||||
url = f"/scripts/snippets/{snippet.pk}/" # type: ignore
|
||||
serializer = ScriptSnippetSerializer(snippet)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(serializer.data, resp.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_delete_script_snippet(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.delete("/scripts/snippets/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test delete script snippet
|
||||
snippet = baker.make("scripts.ScriptSnippet")
|
||||
url = f"/scripts/snippets/{snippet.pk}/" # type: ignore
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
self.assertFalse(ScriptSnippet.objects.filter(pk=snippet.pk).exists()) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_snippet_replacement(self):
|
||||
|
||||
snippet1 = baker.make(
|
||||
"scripts.ScriptSnippet", name="snippet1", code="Snippet 1 Code"
|
||||
)
|
||||
snippet2 = baker.make(
|
||||
"scripts.ScriptSnippet", name="snippet2", code="Snippet 2 Code"
|
||||
)
|
||||
|
||||
test_no_snippet = "No Snippets Here"
|
||||
test_with_snippet = "Snippet 1: {{snippet1}}\nSnippet 2: {{snippet2}}"
|
||||
|
||||
# test putting snippet in text
|
||||
result = Script.replace_with_snippets(test_with_snippet)
|
||||
self.assertEqual(
|
||||
result,
|
||||
f"Snippet 1: {snippet1.code}\nSnippet 2: {snippet2.code}", # type:ignore
|
||||
)
|
||||
|
||||
# test text with no snippets
|
||||
result = Script.replace_with_snippets(test_no_snippet)
|
||||
self.assertEqual(result, test_no_snippet)
|
||||
|
||||
@@ -3,7 +3,10 @@ from django.urls import path
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
path("scripts/", views.GetAddScripts.as_view()),
|
||||
path("<int:pk>/script/", views.GetUpdateDeleteScript.as_view()),
|
||||
path("<int:pk>/download/", views.download),
|
||||
path("", views.GetAddScripts.as_view()),
|
||||
path("<int:pk>/", views.GetUpdateDeleteScript.as_view()),
|
||||
path("snippets/", views.GetAddScriptSnippets.as_view()),
|
||||
path("snippets/<int:pk>/", views.GetUpdateDeleteScriptSnippet.as_view()),
|
||||
path("testscript/", views.TestScript.as_view()),
|
||||
path("download/<int:pk>/", views.download),
|
||||
]
|
||||
|
||||
@@ -1,64 +1,39 @@
|
||||
import base64
|
||||
import json
|
||||
import asyncio
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from loguru import logger
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.parsers import FileUploadParser
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import Script
|
||||
from .models import Script, ScriptSnippet
|
||||
from .permissions import ManageScriptsPerms
|
||||
from .serializers import ScriptSerializer, ScriptTableSerializer
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from agents.permissions import RunScriptPerms
|
||||
from .serializers import (
|
||||
ScriptSerializer,
|
||||
ScriptTableSerializer,
|
||||
ScriptSnippetSerializer,
|
||||
)
|
||||
|
||||
|
||||
class GetAddScripts(APIView):
|
||||
permission_classes = [IsAuthenticated, ManageScriptsPerms]
|
||||
parser_class = (FileUploadParser,)
|
||||
|
||||
def get(self, request):
|
||||
scripts = Script.objects.all()
|
||||
|
||||
showCommunityScripts = request.GET.get("showCommunityScripts", True)
|
||||
if not showCommunityScripts or showCommunityScripts == "false":
|
||||
scripts = Script.objects.filter(script_type="userdefined")
|
||||
else:
|
||||
scripts = Script.objects.all()
|
||||
|
||||
return Response(ScriptTableSerializer(scripts, many=True).data)
|
||||
|
||||
def post(self, request, format=None):
|
||||
data = {
|
||||
"name": request.data["name"],
|
||||
"category": request.data["category"],
|
||||
"description": request.data["description"],
|
||||
"shell": request.data["shell"],
|
||||
"default_timeout": request.data["default_timeout"],
|
||||
"script_type": "userdefined", # force all uploads to be userdefined. built in scripts cannot be edited by user
|
||||
}
|
||||
def post(self, request):
|
||||
|
||||
# code editor upload
|
||||
if "args" in request.data.keys() and isinstance(request.data["args"], list):
|
||||
data["args"] = request.data["args"]
|
||||
|
||||
# file upload, have to json load it cuz it's formData
|
||||
if "args" in request.data.keys() and "file_upload" in request.data.keys():
|
||||
data["args"] = json.loads(request.data["args"])
|
||||
|
||||
if "favorite" in request.data.keys():
|
||||
data["favorite"] = request.data["favorite"]
|
||||
|
||||
if "filename" in request.data.keys():
|
||||
message_bytes = request.data["filename"].read()
|
||||
data["code_base64"] = base64.b64encode(message_bytes).decode(
|
||||
"ascii", "ignore"
|
||||
)
|
||||
|
||||
elif "code" in request.data.keys():
|
||||
message_bytes = request.data["code"].encode("ascii", "ignore")
|
||||
data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
|
||||
|
||||
serializer = ScriptSerializer(data=data, partial=True)
|
||||
serializer = ScriptSerializer(data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
|
||||
@@ -85,11 +60,6 @@ class GetUpdateDeleteScript(APIView):
|
||||
else:
|
||||
return notify_error("Community scripts cannot be edited.")
|
||||
|
||||
elif "code" in data:
|
||||
message_bytes = data["code"].encode("ascii")
|
||||
data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
|
||||
data.pop("code")
|
||||
|
||||
serializer = ScriptSerializer(data=data, instance=script, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
@@ -107,11 +77,87 @@ class GetUpdateDeleteScript(APIView):
|
||||
return Response(f"{script.name} was deleted!")
|
||||
|
||||
|
||||
class GetAddScriptSnippets(APIView):
|
||||
permission_classes = [IsAuthenticated, ManageScriptsPerms]
|
||||
|
||||
def get(self, request):
|
||||
snippets = ScriptSnippet.objects.all()
|
||||
return Response(ScriptSnippetSerializer(snippets, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
|
||||
serializer = ScriptSnippetSerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("Script snippet was saved successfully")
|
||||
|
||||
|
||||
class GetUpdateDeleteScriptSnippet(APIView):
|
||||
permission_classes = [IsAuthenticated, ManageScriptsPerms]
|
||||
|
||||
def get(self, request, pk):
|
||||
snippet = get_object_or_404(ScriptSnippet, pk=pk)
|
||||
return Response(ScriptSnippetSerializer(snippet).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
snippet = get_object_or_404(ScriptSnippet, pk=pk)
|
||||
|
||||
serializer = ScriptSnippetSerializer(
|
||||
instance=snippet, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("Script snippet was saved successfully")
|
||||
|
||||
def delete(self, request, pk):
|
||||
snippet = get_object_or_404(ScriptSnippet, pk=pk)
|
||||
snippet.delete()
|
||||
|
||||
return Response("Script snippet was deleted successfully")
|
||||
|
||||
|
||||
class TestScript(APIView):
|
||||
permission_classes = [IsAuthenticated, RunScriptPerms]
|
||||
|
||||
def post(self, request):
|
||||
from .models import Script
|
||||
from agents.models import Agent
|
||||
|
||||
agent = get_object_or_404(Agent, pk=request.data["agent"])
|
||||
|
||||
parsed_args = Script.parse_script_args(
|
||||
agent, request.data["shell"], request.data["args"]
|
||||
)
|
||||
|
||||
data = {
|
||||
"func": "runscript",
|
||||
"timeout": request.data["timeout"],
|
||||
"script_args": parsed_args,
|
||||
"payload": {
|
||||
"code": Script.replace_with_snippets(request.data["code"]),
|
||||
"shell": request.data["shell"],
|
||||
},
|
||||
}
|
||||
|
||||
r = asyncio.run(
|
||||
agent.nats_cmd(data, timeout=request.data["timeout"], wait=True)
|
||||
)
|
||||
|
||||
return Response(r)
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([IsAuthenticated, ManageScriptsPerms])
|
||||
def download(request, pk):
|
||||
script = get_object_or_404(Script, pk=pk)
|
||||
|
||||
with_snippets = request.GET.get("with_snippets", True)
|
||||
|
||||
if with_snippets == "false":
|
||||
with_snippets = False
|
||||
|
||||
if script.shell == "powershell":
|
||||
filename = f"{script.name}.ps1"
|
||||
elif script.shell == "cmd":
|
||||
@@ -119,4 +165,9 @@ def download(request, pk):
|
||||
else:
|
||||
filename = f"{script.name}.py"
|
||||
|
||||
return Response({"filename": filename, "code": script.code})
|
||||
return Response(
|
||||
{
|
||||
"filename": filename,
|
||||
"code": script.code if with_snippets else script.code_no_snippets,
|
||||
}
|
||||
)
|
||||
|
||||
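To illustrate the new query parameter handled by the download view above, a hedged sketch of two requests; the host, knox token and script pk are placeholders, not values from this changeset:

    # hypothetical client calls; host, token and pk are placeholders
    import requests

    headers = {"Authorization": "Token <knox-token>"}
    base = "https://api.example.com/scripts"

    expanded = requests.get(f"{base}/download/42/", headers=headers)
    raw = requests.get(f"{base}/download/42/?with_snippets=false", headers=headers)

    # both return {"filename": ..., "code": ...}; only the first has
    # {{snippet}} placeholders expanded via Script.code
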
@@ -1,21 +1,16 @@
|
||||
import asyncio
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from loguru import logger
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .permissions import ManageWinSvcsPerms
|
||||
from .serializers import ServicesSerializer
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@api_view()
|
||||
def get_services(request, pk):
|
||||
|
||||
@@ -14,7 +14,15 @@ class Command(BaseCommand):
|
||||
|
||||
agents = Agent.objects.all()
|
||||
for agent in agents:
|
||||
sw = agent.installedsoftware_set.first().software
|
||||
try:
|
||||
sw = agent.installedsoftware_set.first().software
|
||||
except:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
f"Agent {agent.hostname} missing software list. Try manually refreshing it from the web UI from the software tab."
|
||||
)
|
||||
)
|
||||
continue
|
||||
for i in sw:
|
||||
if search in i["name"].lower():
|
||||
self.stdout.write(
|
||||
|
||||
@@ -5,6 +5,6 @@ from . import views
|
||||
urlpatterns = [
|
||||
path("chocos/", views.chocos),
|
||||
path("install/", views.install),
|
||||
path("installed/<pk>/", views.get_installed),
|
||||
path("refresh/<pk>/", views.refresh_installed),
|
||||
path("installed/<int:pk>/", views.get_installed),
|
||||
path("refresh/<int:pk>/", views.refresh_installed),
|
||||
]
|
||||
|
||||
api/tacticalrmm/tacticalrmm/auth.py (new file, 64 lines)
@@ -0,0 +1,64 @@
|
||||
from django.utils import timezone as djangotime
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import exceptions
|
||||
from rest_framework.authentication import BaseAuthentication, HTTP_HEADER_ENCODING
|
||||
|
||||
from accounts.models import APIKey
|
||||
|
||||
|
||||
def get_authorization_header(request):
|
||||
"""
|
||||
Return the request's 'X-API-KEY' header, as a bytestring.
|
||||
|
||||
Hide some test client ickyness where the header can be unicode.
|
||||
"""
|
||||
auth = request.META.get("HTTP_X_API_KEY", b"")
|
||||
if isinstance(auth, str):
|
||||
# Work around django test client oddness
|
||||
auth = auth.encode(HTTP_HEADER_ENCODING)
|
||||
return auth
|
||||
|
||||
|
||||
class APIAuthentication(BaseAuthentication):
|
||||
"""
|
||||
Simple token based authentication for stateless api access.
|
||||
|
||||
Clients should authenticate by passing the token key in the "X-API-KEY"
|
||||
HTTP header. For example:
|
||||
|
||||
X-API-KEY: ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789
|
||||
"""
|
||||
|
||||
def get_model(self):
|
||||
return APIKey
|
||||
|
||||
def authenticate(self, request):
|
||||
auth = get_authorization_header(request)
|
||||
|
||||
if not auth:
|
||||
return None
|
||||
|
||||
try:
|
||||
apikey = auth.decode()
|
||||
except UnicodeError:
|
||||
msg = _(
|
||||
"Invalid token header. Token string should not contain invalid characters."
|
||||
)
|
||||
raise exceptions.AuthenticationFailed(msg)
|
||||
|
||||
return self.authenticate_credentials(apikey)
|
||||
|
||||
def authenticate_credentials(self, key):
|
||||
try:
|
||||
apikey = APIKey.objects.select_related("user").get(key=key)
|
||||
except APIKey.DoesNotExist:
|
||||
raise exceptions.AuthenticationFailed(_("Invalid token."))
|
||||
|
||||
if not apikey.user.is_active:
|
||||
raise exceptions.AuthenticationFailed(_("User inactive or deleted."))
|
||||
|
||||
# check if token is expired
|
||||
if apikey.expiration and apikey.expiration < djangotime.now():
|
||||
raise exceptions.AuthenticationFailed(_("The token as expired."))
|
||||
|
||||
return (apikey.user, apikey.key)
|
||||
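The docstring above spells out the contract: the key travels in the X-API-KEY header. A minimal client sketch, assuming an API key has already been generated and that the agent list endpoint is /agents/ (both assumptions for illustration):

    # hypothetical stateless API call; host, key and endpoint are placeholders
    import requests

    resp = requests.get(
        "https://api.example.com/agents/",
        headers={"X-API-KEY": "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"},
    )
    resp.raise_for_status()
    print(resp.json())

Expired keys raise AuthenticationFailed, as does a key whose user has been deactivated.
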
@@ -35,9 +35,13 @@ app.conf.beat_schedule = {
|
||||
"task": "agents.tasks.auto_self_agent_update_task",
|
||||
"schedule": crontab(minute=35, hour="*"),
|
||||
},
|
||||
"monitor-agents": {
|
||||
"task": "agents.tasks.monitor_agents_task",
|
||||
"schedule": crontab(minute="*/7"),
|
||||
"handle-agents": {
|
||||
"task": "agents.tasks.handle_agents_task",
|
||||
"schedule": crontab(minute="*"),
|
||||
},
|
||||
"get-agentinfo": {
|
||||
"task": "agents.tasks.agent_getinfo_task",
|
||||
"schedule": crontab(minute="*"),
|
||||
},
|
||||
"get-wmi": {
|
||||
"task": "agents.tasks.get_wmi_task",
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
SECRET_KEY = 'changeme'
|
||||
|
||||
ALLOWED_HOSTS = ['api.example.com']
|
||||
|
||||
ADMIN_URL = "somerandomstring/"
|
||||
|
||||
CORS_ORIGIN_WHITELIST = ["https://rmm.example.com",]
|
||||
|
||||
DEBUG = False
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.postgresql',
|
||||
'NAME': 'tacticalrmm',
|
||||
'USER': 'tacticalrmm',
|
||||
'PASSWORD': 'changeme',
|
||||
'HOST': '127.0.0.1',
|
||||
'PORT': '5432',
|
||||
}
|
||||
}
|
||||
|
||||
REST_FRAMEWORK = {
|
||||
'DATETIME_FORMAT': "%b-%d-%Y - %H:%M",
|
||||
|
||||
'DEFAULT_PERMISSION_CLASSES': (
|
||||
'rest_framework.permissions.IsAuthenticated',
|
||||
),
|
||||
'DEFAULT_AUTHENTICATION_CLASSES': (
|
||||
'knox.auth.TokenAuthentication',
|
||||
),
|
||||
}
|
||||
|
||||
if not DEBUG:
|
||||
REST_FRAMEWORK.update({
|
||||
'DEFAULT_RENDERER_CLASSES': (
|
||||
'rest_framework.renderers.JSONRenderer',
|
||||
)
|
||||
})
|
||||
|
||||
MESH_USERNAME = "changeme"
|
||||
MESH_SITE = "https://mesh.example.com"
|
||||
MESH_TOKEN_KEY = "changeme"
|
||||
REDIS_HOST = "localhost"
|
||||
@@ -2,6 +2,7 @@ import threading
|
||||
|
||||
from django.conf import settings
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
from ipware import get_client_ip
|
||||
|
||||
request_local = threading.local()
|
||||
|
||||
@@ -67,6 +68,7 @@ class AuditMiddleware:
|
||||
debug_info["view_func"] = view_func.__name__
|
||||
debug_info["view_args"] = view_args
|
||||
debug_info["view_kwargs"] = view_kwargs
|
||||
debug_info["ip"] = request._client_ip
|
||||
|
||||
request_local.debug_info = debug_info
|
||||
|
||||
@@ -83,3 +85,15 @@ class AuditMiddleware:
|
||||
request_local.debug_info = None
|
||||
request_local.username = None
|
||||
return response
|
||||
|
||||
|
||||
class LogIPMiddleware:
|
||||
def __init__(self, get_response):
|
||||
self.get_response = get_response
|
||||
|
||||
def __call__(self, request):
|
||||
client_ip, is_routable = get_client_ip(request)
|
||||
|
||||
request._client_ip = client_ip
|
||||
response = self.get_response(request)
|
||||
return response
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
from rest_framework import permissions
|
||||
from tacticalrmm.auth import APIAuthentication
|
||||
|
||||
|
||||
def _has_perm(request, perm):
|
||||
if request.user.is_superuser or (
|
||||
request.user.role and getattr(request.user.role, "is_superuser")
|
||||
|
||||
@@ -15,23 +15,25 @@ EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
|
||||
AUTH_USER_MODEL = "accounts.User"
|
||||
|
||||
# latest release
|
||||
TRMM_VERSION = "0.7.2"
|
||||
TRMM_VERSION = "0.8.4"
|
||||
|
||||
# bump this version everytime vue code is changed
|
||||
# to alert user they need to manually refresh their browser
|
||||
APP_VER = "0.0.141"
|
||||
APP_VER = "0.0.146"
|
||||
|
||||
# https://github.com/wh1te909/rmmagent
|
||||
LATEST_AGENT_VER = "1.5.9"
|
||||
LATEST_AGENT_VER = "1.6.2"
|
||||
|
||||
MESH_VER = "0.8.60"
|
||||
MESH_VER = "0.9.16"
|
||||
|
||||
NATS_SERVER_VER = "2.3.3"
|
||||
|
||||
# for the update script, bump when need to recreate venv or npm install
|
||||
PIP_VER = "19"
|
||||
NPM_VER = "19"
|
||||
PIP_VER = "21"
|
||||
NPM_VER = "22"
|
||||
|
||||
SETUPTOOLS_VER = "57.0.0"
|
||||
WHEEL_VER = "0.36.2"
|
||||
SETUPTOOLS_VER = "57.5.0"
|
||||
WHEEL_VER = "0.37.0"
|
||||
|
||||
DL_64 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}.exe"
|
||||
DL_32 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}-x86.exe"
|
||||
@@ -56,6 +58,21 @@ try:
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
REST_FRAMEWORK = {
|
||||
"DATETIME_FORMAT": "%b-%d-%Y - %H:%M",
|
||||
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
|
||||
"DEFAULT_AUTHENTICATION_CLASSES": (
|
||||
"knox.auth.TokenAuthentication",
|
||||
"tacticalrmm.auth.APIAuthentication",
|
||||
),
|
||||
}
|
||||
|
||||
if not "AZPIPELINE" in os.environ:
|
||||
if not DEBUG: # type: ignore
|
||||
REST_FRAMEWORK.update(
|
||||
{"DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",)}
|
||||
)
|
||||
|
||||
INSTALLED_APPS = [
|
||||
"django.contrib.auth",
|
||||
"django.contrib.contenttypes",
|
||||
@@ -109,6 +126,7 @@ MIDDLEWARE = [
|
||||
"django.middleware.security.SecurityMiddleware",
|
||||
"django.contrib.sessions.middleware.SessionMiddleware",
|
||||
"corsheaders.middleware.CorsMiddleware", ##
|
||||
"tacticalrmm.middleware.LogIPMiddleware",
|
||||
"django.middleware.common.CommonMiddleware",
|
||||
"django.middleware.csrf.CsrfViewMiddleware",
|
||||
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
||||
@@ -173,12 +191,23 @@ STATIC_URL = "/static/"
|
||||
STATIC_ROOT = os.path.join(BASE_DIR, "static")
|
||||
STATICFILES_DIRS = [os.path.join(BASE_DIR, "tacticalrmm/static/")]
|
||||
|
||||
|
||||
LOG_CONFIG = {
|
||||
"handlers": [{"sink": os.path.join(LOG_DIR, "debug.log"), "serialize": False}]
|
||||
LOGGING = {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"handlers": {
|
||||
"file": {
|
||||
"level": "ERROR",
|
||||
"class": "logging.FileHandler",
|
||||
"filename": os.path.join(LOG_DIR, "django_debug.log"),
|
||||
}
|
||||
},
|
||||
"loggers": {
|
||||
"django.request": {"handlers": ["file"], "level": "ERROR", "propagate": True}
|
||||
},
|
||||
}
|
||||
|
||||
if "AZPIPELINE" in os.environ:
|
||||
print("PIPELINE")
|
||||
DATABASES = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.postgresql",
|
||||
@@ -193,7 +222,10 @@ if "AZPIPELINE" in os.environ:
|
||||
REST_FRAMEWORK = {
|
||||
"DATETIME_FORMAT": "%b-%d-%Y - %H:%M",
|
||||
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
|
||||
"DEFAULT_AUTHENTICATION_CLASSES": ("knox.auth.TokenAuthentication",),
|
||||
"DEFAULT_AUTHENTICATION_CLASSES": (
|
||||
"knox.auth.TokenAuthentication",
|
||||
"tacticalrmm.auth.APIAuthentication",
|
||||
),
|
||||
"DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",),
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,8 @@ from unittest.mock import mock_open, patch
|
||||
|
||||
import requests
|
||||
from django.conf import settings
|
||||
from django.test import TestCase, override_settings
|
||||
from django.test import override_settings
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .utils import (
|
||||
bitdays_to_string,
|
||||
@@ -16,7 +17,10 @@ from .utils import (
|
||||
)
|
||||
|
||||
|
||||
class TestUtils(TestCase):
|
||||
class TestUtils(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("requests.post")
|
||||
@patch("__main__.__builtins__.open", new_callable=mock_open)
|
||||
def test_generate_winagent_exe_success(self, m_open, mock_post):
|
||||
@@ -77,7 +81,7 @@ class TestUtils(TestCase):
|
||||
@patch("subprocess.run")
|
||||
def test_run_nats_api_cmd(self, mock_subprocess):
|
||||
ids = ["a", "b", "c"]
|
||||
_ = run_nats_api_cmd("monitor", ids)
|
||||
_ = run_nats_api_cmd("wmi", ids)
|
||||
mock_subprocess.assert_called_once()
|
||||
|
||||
def test_bitdays_to_string(self):
|
||||
|
||||
@@ -15,14 +15,12 @@ from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.http import FileResponse
from knox.auth import TokenAuthentication
from loguru import logger
from rest_framework import status
from rest_framework.response import Response

from agents.models import Agent
from core.models import CodeSignToken

logger.configure(**settings.LOG_CONFIG)
from logs.models import DebugLog
from agents.models import Agent

notify_error = lambda msg: Response(msg, status=status.HTTP_400_BAD_REQUEST)

@@ -61,7 +59,7 @@ def generate_winagent_exe(
)

try:
codetoken = CodeSignToken.objects.first().token
codetoken = CodeSignToken.objects.first().token # type:ignore
base_url = get_exegen_url() + "/api/v1/winagents/?"
params = {
"version": settings.LATEST_AGENT_VER,
@@ -107,7 +105,7 @@ def generate_winagent_exe(
break

if errors:
logger.error(errors)
DebugLog.error(message=errors)
return notify_error(
"Something went wrong. Check debug error log for exact error message"
)
@@ -123,7 +121,7 @@ def generate_winagent_exe(
def get_default_timezone():
from core.models import CoreSettings

return pytz.timezone(CoreSettings.objects.first().default_time_zone)
return pytz.timezone(CoreSettings.objects.first().default_time_zone) # type:ignore

def get_bit_days(days: list[str]) -> int:
@@ -178,28 +176,28 @@ def filter_software(sw: SoftwareList) -> SoftwareList:

def reload_nats():
users = [{"user": "tacticalrmm", "password": settings.SECRET_KEY}]
agents = Agent.objects.prefetch_related("user").only("pk", "agent_id")
agents = Agent.objects.prefetch_related("user").only(
"pk", "agent_id"
) # type:ignore
for agent in agents:
try:
users.append(
{"user": agent.agent_id, "password": agent.user.auth_token.key}
)
except:
logger.critical(
f"{agent.hostname} does not have a user account, NATS will not work"
DebugLog.critical(
agent=agent,
log_type="agent_issues",
message=f"{agent.hostname} does not have a user account, NATS will not work",
)

domain = settings.ALLOWED_HOSTS[0].split(".", 1)[1]
cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"
if hasattr(settings, "CERT_FILE") and hasattr(settings, "KEY_FILE"):
if os.path.exists(settings.CERT_FILE) and os.path.exists(settings.KEY_FILE):
cert_file = settings.CERT_FILE
key_file = settings.KEY_FILE
else:
cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"
else:
cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"

config = {
"tls": {
@@ -207,7 +205,7 @@ def reload_nats():
"key_file": key_file,
},
"authorization": {"users": users},
"max_payload": 2048576005,
"max_payload": 67108864,
}

conf = os.path.join(settings.BASE_DIR, "nats-rmm.conf")
@@ -248,21 +246,36 @@ KnoxAuthMiddlewareStack = lambda inner: KnoxAuthMiddlewareInstance(
)

def run_nats_api_cmd(mode: str, ids: list[str], timeout: int = 30) -> None:
config = {
"key": settings.SECRET_KEY,
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
"agents": ids,
}
with tempfile.NamedTemporaryFile() as fp:
def run_nats_api_cmd(mode: str, ids: list[str] = [], timeout: int = 30) -> None:
if mode == "wmi":
config = {
"key": settings.SECRET_KEY,
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
"agents": ids,
}
else:
db = settings.DATABASES["default"]
config = {
"key": settings.SECRET_KEY,
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
"user": db["USER"],
"pass": db["PASSWORD"],
"host": db["HOST"],
"port": int(db["PORT"]),
"dbname": db["NAME"],
}

with tempfile.NamedTemporaryFile(
dir="/opt/tactical/tmp" if settings.DOCKER_BUILD else None
) as fp:
with open(fp.name, "w") as f:
json.dump(config, f)

cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", mode]
try:
subprocess.run(cmd, capture_output=True, timeout=timeout)
subprocess.run(cmd, timeout=timeout)
except Exception as e:
logger.error(e)
DebugLog.error(message=e)

def get_latest_trmm_ver() -> str:
@@ -277,15 +290,16 @@ def get_latest_trmm_ver() -> str:
if "TRMM_VERSION" in line:
return line.split(" ")[2].strip('"')
except Exception as e:
logger.error(e)
DebugLog.error(message=e)

return "error"

def replace_db_values(
string: str, agent: Agent = None, shell: str = None, quotes=True
string: str, instance=None, shell: str = None, quotes=True # type:ignore
) -> Union[str, None]:
from core.models import CustomField, GlobalKVStore
from clients.models import Client, Site

# split by period if exists. First should be model and second should be property i.e {{client.name}}
temp = string.split(".")
@@ -293,7 +307,7 @@ def replace_db_values(
# check for model and property
if len(temp) < 2:
# ignore arg since it is invalid
return None
return ""

# value is in the global keystore and replace value
if temp[0] == "global":
@@ -302,30 +316,48 @@ def replace_db_values(

return f"'{value}'" if quotes else value
else:
logger.error(
f"Couldn't lookup value for: {string}. Make sure it exists in CoreSettings > Key Store"
DebugLog.error(
log_type="scripting",
message=f"{agent.hostname} Couldn't lookup value for: {string}. Make sure it exists in CoreSettings > Key Store", # type:ignore
)
return None
return ""

if not agent:
# agent must be set if not global property
return f"There was an error finding the agent: {agent}"
if not instance:
# instance must be set if not global property
return ""

if temp[0] == "client":
|
||||
model = "client"
|
||||
obj = agent.client
|
||||
if isinstance(instance, Client):
|
||||
obj = instance
|
||||
elif hasattr(instance, "client"):
|
||||
obj = instance.client
|
||||
else:
|
||||
obj = None
|
||||
elif temp[0] == "site":
|
||||
model = "site"
|
||||
obj = agent.site
|
||||
if isinstance(instance, Site):
|
||||
obj = instance
|
||||
elif hasattr(instance, "site"):
|
||||
obj = instance.site
|
||||
else:
|
||||
obj = None
|
||||
elif temp[0] == "agent":
|
||||
model = "agent"
|
||||
obj = agent
|
||||
if isinstance(instance, Agent):
|
||||
obj = instance
|
||||
else:
|
||||
obj = None
|
||||
else:
|
||||
# ignore arg since it is invalid
|
||||
logger.error(
|
||||
f"Not enough information to find value for: {string}. Only agent, site, client, and global are supported."
|
||||
DebugLog.error(
|
||||
log_type="scripting",
|
||||
message=f"{instance} Not enough information to find value for: {string}. Only agent, site, client, and global are supported.",
|
||||
)
|
||||
return None
|
||||
return ""
|
||||
|
||||
if not obj:
|
||||
return ""
|
||||
|
||||
if hasattr(obj, temp[1]):
|
||||
value = f"'{getattr(obj, temp[1])}'" if quotes else getattr(obj, temp[1])
|
||||
@@ -359,19 +391,21 @@ def replace_db_values(
|
||||
|
||||
else:
|
||||
# ignore arg since property is invalid
|
||||
logger.error(
|
||||
f"Couldn't find property on supplied variable: {string}. Make sure it exists as a custom field or a valid agent property"
|
||||
DebugLog.error(
|
||||
log_type="scripting",
|
||||
message=f"{instance} Couldn't find property on supplied variable: {string}. Make sure it exists as a custom field or a valid agent property",
|
||||
)
|
||||
return None
|
||||
return ""
|
||||
|
||||
# log any unhashable type errors
|
||||
if value != None:
|
||||
return value # type: ignore
|
||||
else:
|
||||
logger.error(
|
||||
f"Couldn't lookup value for: {string}. Make sure it exists as a custom field or a valid agent property"
|
||||
DebugLog.error(
|
||||
log_type="scripting",
|
||||
message=f" {instance}({instance.pk}) Couldn't lookup value for: {string}. Make sure it exists as a custom field or a valid agent property",
|
||||
)
|
||||
return None
|
||||
return ""
|
||||
|
||||
|
||||
def format_shell_array(value: list) -> str:
|
||||
|
||||
@@ -3,15 +3,12 @@ import datetime as dt
import time

import pytz
from django.conf import settings
from django.utils import timezone as djangotime
from loguru import logger
from packaging import version as pyver

from agents.models import Agent
from tacticalrmm.celery import app

logger.configure(**settings.LOG_CONFIG)
from logs.models import DebugLog

@app.task
@@ -120,7 +117,11 @@ def check_agent_update_schedule_task():

if install:
# initiate update on agent asynchronously and don't worry about ret code
logger.info(f"Installing windows updates on {agent.salt_id}")
DebugLog.info(
agent=agent,
log_type="windows_updates",
message=f"Installing windows updates on {agent.hostname}",
)
nats_data = {
"func": "installwinupdates",
"guids": agent.get_approved_update_guids(),

@@ -8,7 +8,7 @@ jobs:
strategy:
matrix:
Debian10:
AGENT_NAME: "azpipelines-deb10"
AGENT_NAME: "az-pipeline-fran"

pool:
name: linux-vms
@@ -20,6 +20,7 @@ jobs:
sudo -u postgres psql -c 'DROP DATABASE IF EXISTS pipeline'
sudo -u postgres psql -c 'DROP DATABASE IF EXISTS test_pipeline'
sudo -u postgres psql -c 'CREATE DATABASE pipeline'
sudo -u postgres psql -c "SET client_encoding = 'UTF8'" pipeline
SETTINGS_FILE="/myagent/_work/1/s/api/tacticalrmm/tacticalrmm/settings.py"
rm -rf /myagent/_work/1/s/api/env
cd /myagent/_work/1/s/api

@@ -1,6 +1,6 @@
#!/bin/bash

SCRIPT_VERSION="14"
SCRIPT_VERSION="15"
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh'

GREEN='\033[0;32m'
@@ -80,7 +80,7 @@ if [ -f "${sysd}/daphne.service" ]; then
sudo cp ${sysd}/daphne.service ${tmp_dir}/systemd/
fi

cat /rmm/api/tacticalrmm/tacticalrmm/private/log/debug.log | gzip -9 > ${tmp_dir}/rmm/debug.log.gz
cat /rmm/api/tacticalrmm/tacticalrmm/private/log/django_debug.log | gzip -9 > ${tmp_dir}/rmm/debug.log.gz
cp /rmm/api/tacticalrmm/tacticalrmm/local_settings.py /rmm/api/tacticalrmm/app.ini ${tmp_dir}/rmm/
cp /rmm/web/.env ${tmp_dir}/rmm/env
cp /rmm/api/tacticalrmm/tacticalrmm/private/exe/mesh*.exe ${tmp_dir}/rmm/

@@ -15,6 +15,7 @@ MESH_USER=tactical
MESH_PASS=tactical
MONGODB_USER=mongouser
MONGODB_PASSWORD=mongopass
MESH_PERSISTENT_CONFIG=0

# database settings
POSTGRES_USER=postgres

@@ -9,14 +9,19 @@ set -e
: "${MONGODB_HOST:=tactical-mongodb}"
: "${MONGODB_PORT:=27017}"
: "${NGINX_HOST_IP:=172.20.0.20}"
: "${MESH_PERSISTENT_CONFIG:=0}"

mkdir -p /home/node/app/meshcentral-data
mkdir -p ${TACTICAL_DIR}/tmp

if [ ! -f "/home/node/app/meshcentral-data/config.json" ] || [[ "${MESH_PERSISTENT_CONFIG}" -eq 0 ]]; then

encoded_uri=$(node -p "encodeURI('mongodb://${MONGODB_USER}:${MONGODB_PASSWORD}@${MONGODB_HOST}:${MONGODB_PORT}')")

mesh_config="$(cat << EOF
{
"settings": {
"mongodb": "mongodb://${MONGODB_USER}:${MONGODB_PASSWORD}@${MONGODB_HOST}:${MONGODB_PORT}",
"mongodb": "${encoded_uri}",
"Cert": "${MESH_HOST}",
"TLSOffload": "${NGINX_HOST_IP}",
"RedirPort": 80,
@@ -54,11 +59,19 @@ EOF

echo "${mesh_config}" > /home/node/app/meshcentral-data/config.json

fi

node node_modules/meshcentral --createaccount ${MESH_USER} --pass ${MESH_PASS} --email example@example.com
node node_modules/meshcentral --adminaccount ${MESH_USER}

if [ ! -f "${TACTICAL_DIR}/tmp/mesh_token" ]; then
node node_modules/meshcentral --logintokenkey > ${TACTICAL_DIR}/tmp/mesh_token
mesh_token=$(node node_modules/meshcentral --logintokenkey)

if [[ ${#mesh_token} -eq 160 ]]; then
echo ${mesh_token} > /opt/tactical/tmp/mesh_token
else
echo "Failed to generate mesh token. Fix the error and restart the mesh container"
fi
fi

# wait for nginx container

@@ -1,4 +1,4 @@
FROM nats:2.2.6-alpine
FROM nats:2.3.3-alpine

ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready

@@ -5,6 +5,7 @@ set -e
: "${WORKER_CONNECTIONS:=2048}"
: "${APP_PORT:=80}"
: "${API_PORT:=80}"
: "${DEV:=0}"

CERT_PRIV_PATH=${TACTICAL_DIR}/certs/privkey.pem
CERT_PUB_PATH=${TACTICAL_DIR}/certs/fullchain.pem
@@ -28,6 +29,34 @@ fi

/bin/bash -c "sed -i 's/worker_connections.*/worker_connections ${WORKER_CONNECTIONS};/g' /etc/nginx/nginx.conf"

if [ $DEV -eq 1 ]; then
API_NGINX="
#Using variable to disable start checks
set \$api http://tactical-backend:${API_PORT};
proxy_pass \$api;
proxy_http_version 1.1;
proxy_cache_bypass \$http_upgrade;

proxy_set_header Upgrade \$http_upgrade;
proxy_set_header Connection \"upgrade\";
proxy_set_header Host \$host;
proxy_set_header X-Real-IP \$remote_addr;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto \$scheme;
proxy_set_header X-Forwarded-Host \$host;
proxy_set_header X-Forwarded-Port \$server_port;
"
else
API_NGINX="
#Using variable to disable start checks
set \$api tactical-backend:${API_PORT};

include uwsgi_params;
uwsgi_pass \$api;
"
fi

nginx_config="$(cat << EOF
|
||||
# backend config
|
||||
server {
|
||||
@@ -36,21 +65,7 @@ server {
|
||||
server_name ${API_HOST};
|
||||
|
||||
location / {
|
||||
#Using variable to disable start checks
|
||||
set \$api http://tactical-backend:${API_PORT};
|
||||
|
||||
proxy_pass \$api;
|
||||
proxy_http_version 1.1;
|
||||
proxy_cache_bypass \$http_upgrade;
|
||||
|
||||
proxy_set_header Upgrade \$http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header Host \$host;
|
||||
proxy_set_header X-Real-IP \$remote_addr;
|
||||
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto \$scheme;
|
||||
proxy_set_header X-Forwarded-Host \$host;
|
||||
proxy_set_header X-Forwarded-Port \$server_port;
|
||||
${API_NGINX}
|
||||
}
|
||||
|
||||
location /static/ {
|
||||
|
||||
@@ -1,5 +1,5 @@
# creates python virtual env
FROM python:3.9.2-slim AS CREATE_VENV_STAGE
FROM python:3.9.6-slim AS CREATE_VENV_STAGE

ARG DEBIAN_FRONTEND=noninteractive

@@ -18,13 +18,12 @@ RUN apt-get update && \
apt-get install -y --no-install-recommends gcc libc6-dev && \
rm -rf /var/lib/apt/lists/* && \
pip install --upgrade pip && \
pip install --no-cache-dir setuptools wheel gunicorn && \
sed -i '/uWSGI/d' ${TACTICAL_TMP_DIR}/api/requirements.txt && \
pip install --no-cache-dir setuptools wheel && \
pip install --no-cache-dir -r ${TACTICAL_TMP_DIR}/api/requirements.txt

# runtime image
FROM python:3.9.2-slim
FROM python:3.9.6-slim

# set env variables
ENV VIRTUAL_ENV /opt/venv

@@ -36,7 +36,8 @@ if [ "$1" = 'tactical-init' ]; then
|
||||
|
||||
mkdir -p ${TACTICAL_DIR}/tmp
|
||||
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/exe
|
||||
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/logs
|
||||
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/log
|
||||
touch ${TACTICAL_DIR}/api/tacticalrmm/private/log/django_debug.log
|
||||
|
||||
until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
|
||||
echo "waiting for postgresql container to be ready..."
|
||||
@@ -87,24 +88,6 @@ DATABASES = {
|
||||
}
|
||||
}
|
||||
|
||||
REST_FRAMEWORK = {
|
||||
'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',
|
||||
|
||||
'DEFAULT_PERMISSION_CLASSES': (
|
||||
'rest_framework.permissions.IsAuthenticated',
|
||||
),
|
||||
'DEFAULT_AUTHENTICATION_CLASSES': (
|
||||
'knox.auth.TokenAuthentication',
|
||||
),
|
||||
}
|
||||
|
||||
if not DEBUG:
|
||||
REST_FRAMEWORK.update({
|
||||
'DEFAULT_RENDERER_CLASSES': (
|
||||
'rest_framework.renderers.JSONRenderer',
|
||||
)
|
||||
})
|
||||
|
||||
MESH_USERNAME = '${MESH_USER}'
|
||||
MESH_SITE = 'https://${MESH_HOST}'
|
||||
MESH_TOKEN_KEY = '${MESH_TOKEN}'
|
||||
@@ -116,6 +99,28 @@ EOF
|
||||
|
||||
echo "${localvars}" > ${TACTICAL_DIR}/api/tacticalrmm/local_settings.py
|
||||
|
||||
|
||||
uwsgiconf="$(cat << EOF
|
||||
[uwsgi]
|
||||
chdir = /opt/tactical/api
|
||||
module = tacticalrmm.wsgi
|
||||
home = /opt/venv
|
||||
master = true
|
||||
processes = 8
|
||||
threads = 2
|
||||
enable-threads = true
|
||||
socket = 0.0.0.0:80
|
||||
chmod-socket = 660
|
||||
buffer-size = 65535
|
||||
vacuum = true
|
||||
die-on-term = true
|
||||
max-requests = 2000
|
||||
EOF
|
||||
)"
|
||||
|
||||
echo "${uwsgiconf}" > ${TACTICAL_DIR}/api/uwsgi.ini
|
||||
|
||||
|
||||
# run migrations and init scripts
|
||||
python manage.py migrate --no-input
|
||||
python manage.py collectstatic --no-input
|
||||
@@ -141,22 +146,7 @@ fi

if [ "$1" = 'tactical-backend' ]; then
check_tactical_ready

# Prepare log files and start outputting logs to stdout
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/logs
touch ${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn.log
touch ${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn-access.log
tail -n 0 -f ${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn*.log &

export DJANGO_SETTINGS_MODULE=tacticalrmm.settings

exec gunicorn tacticalrmm.wsgi:application \
--name tactical-backend \
--bind 0.0.0.0:80 \
--workers 5 \
--log-level=info \
--log-file=${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn.log \
--access-logfile=${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn-access.log \

uwsgi ${TACTICAL_DIR}/api/uwsgi.ini
fi

if [ "$1" = 'tactical-celery' ]; then
@@ -170,7 +160,7 @@ if [ "$1" = 'tactical-celerybeat' ]; then
celery -A tacticalrmm beat -l info
fi

# backend container
# websocket container
if [ "$1" = 'tactical-websockets' ]; then
check_tactical_ready

@@ -97,6 +97,7 @@ services:
MESH_PASS: ${MESH_PASS}
MONGODB_USER: ${MONGODB_USER}
MONGODB_PASSWORD: ${MONGODB_PASSWORD}
MESH_PERSISTENT_CONFIG: ${MESH_PERSISTENT_CONFIG}
networks:
proxy:
aliases:

Some files were not shown because too many files have changed in this diff.