Compare commits
275 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ade64d6c0a | ||
|
|
8204bdfc5f | ||
|
|
1a9bb3e986 | ||
|
|
49356479e5 | ||
|
|
c44e9a7292 | ||
|
|
21771a593f | ||
|
|
84458dfc4c | ||
|
|
5835632dab | ||
|
|
67aa7229ef | ||
|
|
b72dc3ed3a | ||
|
|
0f93d4a5bd | ||
|
|
106320b035 | ||
|
|
63951705cd | ||
|
|
a8d56921d5 | ||
|
|
10bc133cf1 | ||
|
|
adeb5b35c9 | ||
|
|
589ff46ea5 | ||
|
|
656fcb9fe7 | ||
|
|
1cb9353006 | ||
|
|
57bf16ba07 | ||
|
|
659846ed88 | ||
|
|
25894044e0 | ||
|
|
e7a0826beb | ||
|
|
1f7ddee23b | ||
|
|
7e186730db | ||
|
|
6713a50208 | ||
|
|
7c9d8fcfec | ||
|
|
33bfc8cfe8 | ||
|
|
ca735bc14a | ||
|
|
4ba748a18b | ||
|
|
f1845106f8 | ||
|
|
67e7156c4b | ||
|
|
4a476adebf | ||
|
|
918798f8cc | ||
|
|
5a3f868866 | ||
|
|
feea2c6396 | ||
|
|
707b4c46d9 | ||
|
|
89ca39fc2b | ||
|
|
204281b12d | ||
|
|
a8538a7e95 | ||
|
|
dee1b471e9 | ||
|
|
aa04e9b01f | ||
|
|
350f0dc604 | ||
|
|
6021f2efd6 | ||
|
|
51838ec25a | ||
|
|
54768a121e | ||
|
|
8ff72cdca3 | ||
|
|
2cb53ad06b | ||
|
|
b8349de31d | ||
|
|
d7e11af7f8 | ||
|
|
dd8d39e698 | ||
|
|
afb1316daa | ||
|
|
04d7017536 | ||
|
|
6a1c75b060 | ||
|
|
5c94611f3b | ||
|
|
4e5676e80f | ||
|
|
c96d688a9c | ||
|
|
804242e9a5 | ||
|
|
0ec9760b17 | ||
|
|
d481ae3da4 | ||
|
|
4742c14fc1 | ||
|
|
509b0d501b | ||
|
|
d4c9b04d4e | ||
|
|
16fb4d331b | ||
|
|
e9e5bf31a7 | ||
|
|
221418120e | ||
|
|
46f852e26e | ||
|
|
4234cf0a31 | ||
|
|
7f3daea648 | ||
|
|
2eb16c82f4 | ||
|
|
e00b2ce591 | ||
|
|
d71e1311ca | ||
|
|
2cf16963e3 | ||
|
|
10bf7b7fb4 | ||
|
|
182c85a228 | ||
|
|
94b1988b90 | ||
|
|
6f7e62e9a0 | ||
|
|
aa7076af04 | ||
|
|
c928e8f0d4 | ||
|
|
5c6b106f68 | ||
|
|
d45bcea1ff | ||
|
|
6ff2dc79f8 | ||
|
|
b752329987 | ||
|
|
f21465335a | ||
|
|
0801adfc4b | ||
|
|
5bee8052d5 | ||
|
|
68dca5dfef | ||
|
|
3f51dd1d2f | ||
|
|
7f80889d77 | ||
|
|
efc61c0222 | ||
|
|
6fc0a05d34 | ||
|
|
a9be872d7a | ||
|
|
6ca85f099e | ||
|
|
86ff677b8a | ||
|
|
35e295df86 | ||
|
|
cd4d301790 | ||
|
|
93bb329c3d | ||
|
|
7c1e0f2c30 | ||
|
|
b57f471f44 | ||
|
|
252a9a2ed6 | ||
|
|
7258d4d787 | ||
|
|
75522fa295 | ||
|
|
4ba8f41d95 | ||
|
|
f326f8e4de | ||
|
|
f863dc058e | ||
|
|
20891db251 | ||
|
|
f1d05f1342 | ||
|
|
8dd636b0eb | ||
|
|
6b5bda8ee1 | ||
|
|
ddc5597157 | ||
|
|
ae112c7257 | ||
|
|
c22f10f96a | ||
|
|
18d10c9bec | ||
|
|
890e430cb7 | ||
|
|
dadc3d4cd7 | ||
|
|
d98b4d7320 | ||
|
|
340f532238 | ||
|
|
7669f68e7c | ||
|
|
3557e5514f | ||
|
|
a9f09b7614 | ||
|
|
845b9e4568 | ||
|
|
24a6092dcf | ||
|
|
195ae7d8b1 | ||
|
|
a5c6ea7ffc | ||
|
|
eb7a4ac29f | ||
|
|
508ef73fde | ||
|
|
838d6d8076 | ||
|
|
762c3159b8 | ||
|
|
7a88a06bcf | ||
|
|
0b1e3d7de5 | ||
|
|
9a83c73f21 | ||
|
|
aa50c7b268 | ||
|
|
179a5a80f4 | ||
|
|
0ddae527ef | ||
|
|
ee7a46de26 | ||
|
|
95522fda74 | ||
|
|
e58881c2bd | ||
|
|
36a902a44e | ||
|
|
16b74549a2 | ||
|
|
da7ededfb1 | ||
|
|
790bb08718 | ||
|
|
e6765f421f | ||
|
|
7e8f1fe904 | ||
|
|
eacce4578a | ||
|
|
07b2543972 | ||
|
|
d1c3fc8493 | ||
|
|
f453b16010 | ||
|
|
05151d8978 | ||
|
|
8218e1acc3 | ||
|
|
30212fc89a | ||
|
|
b31c13fcae | ||
|
|
6b95fc6f1d | ||
|
|
369cf17eb2 | ||
|
|
4dd8f512cc | ||
|
|
26cfec7d80 | ||
|
|
67a87ccf00 | ||
|
|
667cebcf94 | ||
|
|
bc1747ca1c | ||
|
|
945d8647bf | ||
|
|
dfe2e94627 | ||
|
|
09a5591eec | ||
|
|
f2bf06a0ba | ||
|
|
eedad4ab1c | ||
|
|
336a62ab29 | ||
|
|
b5603a5233 | ||
|
|
73890f553c | ||
|
|
f6243b8968 | ||
|
|
3770dc74d4 | ||
|
|
45f4e947c5 | ||
|
|
9928d7c6e1 | ||
|
|
bf776eeb2b | ||
|
|
ae7c0e9195 | ||
|
|
e90b640602 | ||
|
|
ba7529d3f5 | ||
|
|
34667f252e | ||
|
|
d18bddcb7b | ||
|
|
96dff49d33 | ||
|
|
b389728338 | ||
|
|
cdc7da86f3 | ||
|
|
4745cc0378 | ||
|
|
434f132479 | ||
|
|
fb0f31ffc7 | ||
|
|
bb1d73c0ae | ||
|
|
0e823d1191 | ||
|
|
48f4199ff3 | ||
|
|
eaf379587b | ||
|
|
672446b7d1 | ||
|
|
dfe52c1b07 | ||
|
|
d63df03ad8 | ||
|
|
aba4f9f2ce | ||
|
|
ac5c1e7803 | ||
|
|
d521dbf50e | ||
|
|
f210ed3e6a | ||
|
|
df3cac4ea6 | ||
|
|
f778c5175b | ||
|
|
6c66ff28dd | ||
|
|
d5b6ec702b | ||
|
|
c62a5fcef2 | ||
|
|
59c47e9200 | ||
|
|
4ba44d8932 | ||
|
|
27dae05e1b | ||
|
|
a251ae9b90 | ||
|
|
7e960b2bde | ||
|
|
5df4825158 | ||
|
|
8984d06d93 | ||
|
|
eed7aac047 | ||
|
|
54b068de4a | ||
|
|
f0f33b00b6 | ||
|
|
1043405088 | ||
|
|
0131b10805 | ||
|
|
a19b441f62 | ||
|
|
28edc31d43 | ||
|
|
0f9872a818 | ||
|
|
76ce4296f3 | ||
|
|
3dd2671380 | ||
|
|
298ca31332 | ||
|
|
8f911aa6b9 | ||
|
|
82a5c7d9b1 | ||
|
|
7f013dcdba | ||
|
|
68e2e16076 | ||
|
|
ea23c763c9 | ||
|
|
5dcecb3206 | ||
|
|
5bd48e2d0e | ||
|
|
afd0a02589 | ||
|
|
2379192d53 | ||
|
|
a6489290c8 | ||
|
|
5f74c43415 | ||
|
|
aa8b84a302 | ||
|
|
b987d041b0 | ||
|
|
b62e37307e | ||
|
|
61a59aa6ac | ||
|
|
f79ec27f1d | ||
|
|
b993fe380f | ||
|
|
d974b5f55f | ||
|
|
f21ae93197 | ||
|
|
342ff18be8 | ||
|
|
a8236f69bf | ||
|
|
ab15a2448d | ||
|
|
6ff4d8f558 | ||
|
|
bb04ba528c | ||
|
|
b94a795189 | ||
|
|
9968184733 | ||
|
|
1be6f8f87a | ||
|
|
426821cceb | ||
|
|
4fec0deaf7 | ||
|
|
144ac5b6ce | ||
|
|
97c73786fa | ||
|
|
82e59d7da0 | ||
|
|
b2c10de6af | ||
|
|
d72029c2c6 | ||
|
|
17b9987063 | ||
|
|
fde07da2b7 | ||
|
|
c23bc29511 | ||
|
|
714cad2a52 | ||
|
|
357d5d2fde | ||
|
|
d477cce901 | ||
|
|
eb6af52ad1 | ||
|
|
aae75023a7 | ||
|
|
41dcd4f458 | ||
|
|
4651ae4495 | ||
|
|
ed61e0b0fc | ||
|
|
1eefc6fbf4 | ||
|
|
09ebf2cea2 | ||
|
|
b3b0c4cd65 | ||
|
|
f4b7924e8f | ||
|
|
ea68d38b82 | ||
|
|
dfbaa71132 | ||
|
|
6c328deb08 | ||
|
|
add564d5bf | ||
|
|
fa94acb426 | ||
|
|
6827468f13 | ||
|
|
53fd43868f | ||
|
|
9ced7561c5 | ||
|
|
31d55d3425 | ||
|
|
171d2a5bb9 |
@@ -8,7 +8,7 @@ ENV VIRTUAL_ENV ${WORKSPACE_DIR}/api/tacticalrmm/env
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
|
||||
EXPOSE 8000 8383
|
||||
EXPOSE 8000 8383 8005
|
||||
|
||||
RUN groupadd -g 1000 tactical && \
|
||||
useradd -u 1000 -g 1000 tactical
|
||||
|
||||
@@ -2,6 +2,7 @@ version: '3.4'
|
||||
|
||||
services:
|
||||
api-dev:
|
||||
container_name: trmm-api-dev
|
||||
image: api-dev
|
||||
restart: always
|
||||
build:
|
||||
@@ -21,6 +22,7 @@ services:
|
||||
- tactical-backend
|
||||
|
||||
app-dev:
|
||||
container_name: trmm-app-dev
|
||||
image: node:14-alpine
|
||||
restart: always
|
||||
command: /bin/sh -c "npm install npm@latest -g && npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
|
||||
@@ -36,6 +38,7 @@ services:
|
||||
|
||||
# nats
|
||||
nats-dev:
|
||||
container_name: trmm-nats-dev
|
||||
image: ${IMAGE_REPO}tactical-nats:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
@@ -55,6 +58,7 @@ services:
|
||||
|
||||
# meshcentral container
|
||||
meshcentral-dev:
|
||||
container_name: trmm-meshcentral-dev
|
||||
image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
@@ -77,6 +81,7 @@ services:
|
||||
|
||||
# mongodb container for meshcentral
|
||||
mongodb-dev:
|
||||
container_name: trmm-mongodb-dev
|
||||
image: mongo:4.4
|
||||
restart: always
|
||||
environment:
|
||||
@@ -92,6 +97,7 @@ services:
|
||||
|
||||
# postgres database for api service
|
||||
postgres-dev:
|
||||
container_name: trmm-postgres-dev
|
||||
image: postgres:13-alpine
|
||||
restart: always
|
||||
environment:
|
||||
@@ -107,6 +113,7 @@ services:
|
||||
|
||||
# redis container for celery tasks
|
||||
redis-dev:
|
||||
container_name: trmm-redis-dev
|
||||
restart: always
|
||||
image: redis:6.0-alpine
|
||||
networks:
|
||||
@@ -115,6 +122,7 @@ services:
|
||||
- tactical-redis
|
||||
|
||||
init-dev:
|
||||
container_name: trmm-init-dev
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
@@ -143,6 +151,7 @@ services:
|
||||
|
||||
# container for celery worker service
|
||||
celery-dev:
|
||||
container_name: trmm-celery-dev
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
@@ -160,6 +169,7 @@ services:
|
||||
|
||||
# container for celery beat service
|
||||
celerybeat-dev:
|
||||
container_name: trmm-celerybeat-dev
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
@@ -175,8 +185,9 @@ services:
|
||||
- postgres-dev
|
||||
- redis-dev
|
||||
|
||||
# container for celery beat service
|
||||
# container for websockets communication
|
||||
websockets-dev:
|
||||
container_name: trmm-websockets-dev
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
@@ -194,8 +205,9 @@ services:
|
||||
- postgres-dev
|
||||
- redis-dev
|
||||
|
||||
nginx-dev:
|
||||
# container for tactical reverse proxy
|
||||
nginx-dev:
|
||||
container_name: trmm-nginx-dev
|
||||
image: ${IMAGE_REPO}tactical-nginx:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
@@ -215,6 +227,21 @@ services:
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
|
||||
mkdocs-dev:
|
||||
container_name: trmm-mkdocs-dev
|
||||
image: api-dev
|
||||
restart: always
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["tactical-mkdocs-dev"]
|
||||
ports:
|
||||
- "8005:8005"
|
||||
volumes:
|
||||
- ..:/workspace:cached
|
||||
networks:
|
||||
- dev
|
||||
|
||||
volumes:
|
||||
tactical-data-dev:
|
||||
postgres-data-dev:
|
||||
|
||||
@@ -136,10 +136,11 @@ if [ "$1" = 'tactical-init-dev' ]; then
|
||||
webenv="$(cat << EOF
|
||||
PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}"
|
||||
DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}"
|
||||
APP_URL = https://${APP_HOST}
|
||||
APP_URL = "https://${APP_HOST}"
|
||||
DOCKER_BUILD = 1
|
||||
EOF
|
||||
)"
|
||||
echo "${webenv}" | tee ${WORKSPACE_DIR}/web/.env > /dev/null
|
||||
echo "${webenv}" | tee "${WORKSPACE_DIR}"/web/.env > /dev/null
|
||||
|
||||
# chown everything to tactical user
|
||||
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
|
||||
@@ -169,3 +170,8 @@ if [ "$1" = 'tactical-websockets-dev' ]; then
|
||||
check_tactical_ready
|
||||
"${VIRTUAL_ENV}"/bin/daphne tacticalrmm.asgi:application --port 8383 -b 0.0.0.0
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-mkdocs-dev' ]; then
|
||||
cd "${WORKSPACE_DIR}/docs"
|
||||
"${VIRTUAL_ENV}"/bin/mkdocs serve
|
||||
fi
|
||||
|
||||
@@ -11,8 +11,6 @@ It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and i
|
||||
# [LIVE DEMO](https://rmm.tacticalrmm.io/)
|
||||
Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
|
||||
|
||||
*Tactical RMM is currently in alpha and subject to breaking changes. Use in production at your own risk.*
|
||||
|
||||
### [Discord Chat](https://discord.gg/upGTkWp)
|
||||
|
||||
### [Documentation](https://wh1te909.github.io/tacticalrmm/)
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2 on 2021-04-11 01:43
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0013_user_client_tree_sort'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='client_tree_splitter',
|
||||
field=models.PositiveIntegerField(default=11),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2 on 2021-04-11 03:03
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0014_user_client_tree_splitter'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='loading_bar_color',
|
||||
field=models.CharField(default='red', max_length=255),
|
||||
),
|
||||
]
|
||||
@@ -36,6 +36,8 @@ class User(AbstractUser, BaseAuditModel):
|
||||
client_tree_sort = models.CharField(
|
||||
max_length=50, choices=CLIENT_TREE_SORT_CHOICES, default="alphafail"
|
||||
)
|
||||
client_tree_splitter = models.PositiveIntegerField(default=11)
|
||||
loading_bar_color = models.CharField(max_length=255, default="red")
|
||||
|
||||
agent = models.OneToOneField(
|
||||
"agents.Agent",
|
||||
|
||||
@@ -13,6 +13,8 @@ class UserUISerializer(ModelSerializer):
|
||||
"agent_dblclick_action",
|
||||
"default_agent_tbl_tab",
|
||||
"client_tree_sort",
|
||||
"client_tree_splitter",
|
||||
"loading_bar_color",
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -278,6 +278,8 @@ class TestUserAction(TacticalTestCase):
|
||||
"agent_dblclick_action": "editagent",
|
||||
"default_agent_tbl_tab": "mixed",
|
||||
"client_tree_sort": "alpha",
|
||||
"client_tree_splitter": 14,
|
||||
"loading_bar_color": "green",
|
||||
}
|
||||
r = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-17 01:28
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0035_auto_20210329_1709'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='agent',
|
||||
name='block_policy_inheritance',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -63,6 +63,7 @@ class Agent(BaseAuditModel):
|
||||
max_length=255, choices=TZ_CHOICES, null=True, blank=True
|
||||
)
|
||||
maintenance_mode = models.BooleanField(default=False)
|
||||
block_policy_inheritance = models.BooleanField(default=False)
|
||||
alert_template = models.ForeignKey(
|
||||
"alerts.AlertTemplate",
|
||||
related_name="agents",
|
||||
@@ -96,9 +97,9 @@ class Agent(BaseAuditModel):
|
||||
# or if site has changed on agent and if so generate-policies
|
||||
if (
|
||||
not old_agent
|
||||
or old_agent
|
||||
and old_agent.policy != self.policy
|
||||
or old_agent.site != self.site
|
||||
or (old_agent and old_agent.policy != self.policy)
|
||||
or (old_agent.site != self.site)
|
||||
or (old_agent.block_policy_inheritance != self.block_policy_inheritance)
|
||||
):
|
||||
self.generate_checks_from_policies()
|
||||
self.generate_tasks_from_policies()
|
||||
@@ -166,7 +167,7 @@ class Agent(BaseAuditModel):
|
||||
|
||||
@property
|
||||
def checks(self):
|
||||
total, passing, failing = 0, 0, 0
|
||||
total, passing, failing, warning, info = 0, 0, 0, 0, 0
|
||||
|
||||
if self.agentchecks.exists(): # type: ignore
|
||||
for i in self.agentchecks.all(): # type: ignore
|
||||
@@ -174,13 +175,20 @@ class Agent(BaseAuditModel):
|
||||
if i.status == "passing":
|
||||
passing += 1
|
||||
elif i.status == "failing":
|
||||
failing += 1
|
||||
if i.alert_severity == "error":
|
||||
failing += 1
|
||||
elif i.alert_severity == "warning":
|
||||
warning += 1
|
||||
elif i.alert_severity == "info":
|
||||
info += 1
|
||||
|
||||
ret = {
|
||||
"total": total,
|
||||
"passing": passing,
|
||||
"failing": failing,
|
||||
"has_failing_checks": failing > 0,
|
||||
"warning": warning,
|
||||
"info": info,
|
||||
"has_failing_checks": failing > 0 or warning > 0,
|
||||
}
|
||||
return ret
|
||||
|
||||
@@ -195,6 +203,27 @@ class Agent(BaseAuditModel):
|
||||
except:
|
||||
return ["unknown cpu model"]
|
||||
|
||||
@property
|
||||
def graphics(self):
|
||||
ret, mrda = [], []
|
||||
try:
|
||||
graphics = self.wmi_detail["graphics"]
|
||||
for i in graphics:
|
||||
caption = [x["Caption"] for x in i if "Caption" in x][0]
|
||||
if "microsoft remote display adapter" in caption.lower():
|
||||
mrda.append("yes")
|
||||
continue
|
||||
|
||||
ret.append([x["Caption"] for x in i if "Caption" in x][0])
|
||||
|
||||
# only return this if no other graphics cards
|
||||
if not ret and mrda:
|
||||
return "Microsoft Remote Display Adapter"
|
||||
|
||||
return ", ".join(ret)
|
||||
except:
|
||||
return "Graphics info requires agent v1.4.14"
|
||||
|
||||
@property
|
||||
def local_ips(self):
|
||||
ret = []
|
||||
@@ -322,7 +351,7 @@ class Agent(BaseAuditModel):
|
||||
online = [
|
||||
agent
|
||||
for agent in Agent.objects.only(
|
||||
"pk", "last_seen", "overdue_time", "offline_time"
|
||||
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
|
||||
)
|
||||
if agent.status == "online"
|
||||
]
|
||||
@@ -393,21 +422,34 @@ class Agent(BaseAuditModel):
|
||||
|
||||
# check site policy if agent policy doesn't have one
|
||||
elif site.server_policy and site.server_policy.winupdatepolicy.exists():
|
||||
patch_policy = site.server_policy.winupdatepolicy.get()
|
||||
# make sure agent isn;t blocking policy inheritance
|
||||
if not self.block_policy_inheritance:
|
||||
patch_policy = site.server_policy.winupdatepolicy.get()
|
||||
|
||||
# if site doesn't have a patch policy check the client
|
||||
elif (
|
||||
site.client.server_policy
|
||||
and site.client.server_policy.winupdatepolicy.exists()
|
||||
):
|
||||
patch_policy = site.client.server_policy.winupdatepolicy.get()
|
||||
# make sure agent and site are not blocking inheritance
|
||||
if (
|
||||
not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
):
|
||||
patch_policy = site.client.server_policy.winupdatepolicy.get()
|
||||
|
||||
# if patch policy still doesn't exist check default policy
|
||||
elif (
|
||||
core_settings.server_policy
|
||||
and core_settings.server_policy.winupdatepolicy.exists()
|
||||
):
|
||||
patch_policy = core_settings.server_policy.winupdatepolicy.get()
|
||||
# make sure agent site and client are not blocking inheritance
|
||||
if (
|
||||
not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
and not site.client.block_policy_inheritance
|
||||
):
|
||||
patch_policy = core_settings.server_policy.winupdatepolicy.get()
|
||||
|
||||
elif self.monitoring_type == "workstation":
|
||||
# check agent policy first which should override client or site policy
|
||||
@@ -418,21 +460,36 @@ class Agent(BaseAuditModel):
|
||||
site.workstation_policy
|
||||
and site.workstation_policy.winupdatepolicy.exists()
|
||||
):
|
||||
patch_policy = site.workstation_policy.winupdatepolicy.get()
|
||||
# make sure agent isn;t blocking policy inheritance
|
||||
if not self.block_policy_inheritance:
|
||||
patch_policy = site.workstation_policy.winupdatepolicy.get()
|
||||
|
||||
# if site doesn't have a patch policy check the client
|
||||
elif (
|
||||
site.client.workstation_policy
|
||||
and site.client.workstation_policy.winupdatepolicy.exists()
|
||||
):
|
||||
patch_policy = site.client.workstation_policy.winupdatepolicy.get()
|
||||
# make sure agent and site are not blocking inheritance
|
||||
if (
|
||||
not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
):
|
||||
patch_policy = site.client.workstation_policy.winupdatepolicy.get()
|
||||
|
||||
# if patch policy still doesn't exist check default policy
|
||||
elif (
|
||||
core_settings.workstation_policy
|
||||
and core_settings.workstation_policy.winupdatepolicy.exists()
|
||||
):
|
||||
patch_policy = core_settings.workstation_policy.winupdatepolicy.get()
|
||||
# make sure agent site and client are not blocking inheritance
|
||||
if (
|
||||
not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
and not site.client.block_policy_inheritance
|
||||
):
|
||||
patch_policy = (
|
||||
core_settings.workstation_policy.winupdatepolicy.get()
|
||||
)
|
||||
|
||||
# if policy still doesn't exist return the agent patch policy
|
||||
if not patch_policy:
|
||||
@@ -499,6 +556,7 @@ class Agent(BaseAuditModel):
|
||||
and site.server_policy
|
||||
and site.server_policy.alert_template
|
||||
and site.server_policy.alert_template.is_active
|
||||
and not self.block_policy_inheritance
|
||||
):
|
||||
templates.append(site.server_policy.alert_template)
|
||||
if (
|
||||
@@ -506,6 +564,7 @@ class Agent(BaseAuditModel):
|
||||
and site.workstation_policy
|
||||
and site.workstation_policy.alert_template
|
||||
and site.workstation_policy.alert_template.is_active
|
||||
and not self.block_policy_inheritance
|
||||
):
|
||||
templates.append(site.workstation_policy.alert_template)
|
||||
|
||||
@@ -519,6 +578,8 @@ class Agent(BaseAuditModel):
|
||||
and client.server_policy
|
||||
and client.server_policy.alert_template
|
||||
and client.server_policy.alert_template.is_active
|
||||
and not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
):
|
||||
templates.append(client.server_policy.alert_template)
|
||||
if (
|
||||
@@ -526,15 +587,28 @@ class Agent(BaseAuditModel):
|
||||
and client.workstation_policy
|
||||
and client.workstation_policy.alert_template
|
||||
and client.workstation_policy.alert_template.is_active
|
||||
and not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
):
|
||||
templates.append(client.workstation_policy.alert_template)
|
||||
|
||||
# check if alert template is on client and return
|
||||
if client.alert_template and client.alert_template.is_active:
|
||||
if (
|
||||
client.alert_template
|
||||
and client.alert_template.is_active
|
||||
and not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
):
|
||||
templates.append(client.alert_template)
|
||||
|
||||
# check if alert template is applied globally and return
|
||||
if core.alert_template and core.alert_template.is_active:
|
||||
if (
|
||||
core.alert_template
|
||||
and core.alert_template.is_active
|
||||
and not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
and not client.block_policy_inheritance
|
||||
):
|
||||
templates.append(core.alert_template)
|
||||
|
||||
# if agent is a workstation, check if policy with alert template is assigned to the site, client, or core
|
||||
@@ -543,6 +617,9 @@ class Agent(BaseAuditModel):
|
||||
and core.server_policy
|
||||
and core.server_policy.alert_template
|
||||
and core.server_policy.alert_template.is_active
|
||||
and not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
and not client.block_policy_inheritance
|
||||
):
|
||||
templates.append(core.server_policy.alert_template)
|
||||
if (
|
||||
@@ -550,6 +627,9 @@ class Agent(BaseAuditModel):
|
||||
and core.workstation_policy
|
||||
and core.workstation_policy.alert_template
|
||||
and core.workstation_policy.alert_template.is_active
|
||||
and not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
and not client.block_policy_inheritance
|
||||
):
|
||||
templates.append(core.workstation_policy.alert_template)
|
||||
|
||||
@@ -651,7 +731,11 @@ class Agent(BaseAuditModel):
|
||||
except ErrTimeout:
|
||||
ret = "timeout"
|
||||
else:
|
||||
ret = msgpack.loads(msg.data) # type: ignore
|
||||
try:
|
||||
ret = msgpack.loads(msg.data) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
ret = str(e)
|
||||
|
||||
await nc.close()
|
||||
return ret
|
||||
@@ -699,36 +783,6 @@ class Agent(BaseAuditModel):
|
||||
except:
|
||||
pass
|
||||
|
||||
# define how the agent should handle pending actions
|
||||
def handle_pending_actions(self):
|
||||
pending_actions = self.pendingactions.filter(status="pending") # type: ignore
|
||||
|
||||
for action in pending_actions:
|
||||
if action.action_type == "taskaction":
|
||||
from autotasks.tasks import (
|
||||
create_win_task_schedule,
|
||||
delete_win_task_schedule,
|
||||
enable_or_disable_win_task,
|
||||
)
|
||||
|
||||
task_id = action.details["task_id"]
|
||||
|
||||
if action.details["action"] == "taskcreate":
|
||||
create_win_task_schedule.delay(task_id, pending_action=action.id)
|
||||
elif action.details["action"] == "tasktoggle":
|
||||
enable_or_disable_win_task.delay(
|
||||
task_id, action.details["value"], pending_action=action.id
|
||||
)
|
||||
elif action.details["action"] == "taskdelete":
|
||||
delete_win_task_schedule.delay(task_id, pending_action=action.id)
|
||||
|
||||
# for clearing duplicate pending actions on agent
|
||||
def remove_matching_pending_task_actions(self, task_id):
|
||||
# remove any other pending actions on agent with same task_id
|
||||
for action in self.pendingactions.filter(action_type="taskaction").exclude(status="completed"): # type: ignore
|
||||
if action.details["task_id"] == task_id:
|
||||
action.delete()
|
||||
|
||||
def should_create_alert(self, alert_template=None):
|
||||
return (
|
||||
self.overdue_dashboard_alert
|
||||
@@ -815,12 +869,6 @@ class RecoveryAction(models.Model):
|
||||
def __str__(self):
|
||||
return f"{self.agent.hostname} - {self.mode}"
|
||||
|
||||
def send(self):
|
||||
ret = {"recovery": self.mode}
|
||||
if self.mode == "command":
|
||||
ret["cmd"] = self.command
|
||||
return ret
|
||||
|
||||
|
||||
class Note(models.Model):
|
||||
agent = models.ForeignKey(
|
||||
|
||||
@@ -16,6 +16,7 @@ class AgentSerializer(serializers.ModelSerializer):
|
||||
local_ips = serializers.ReadOnlyField()
|
||||
make_model = serializers.ReadOnlyField()
|
||||
physical_disks = serializers.ReadOnlyField()
|
||||
graphics = serializers.ReadOnlyField()
|
||||
checks = serializers.ReadOnlyField()
|
||||
timezone = serializers.ReadOnlyField()
|
||||
all_timezones = serializers.SerializerMethodField()
|
||||
@@ -115,6 +116,7 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
||||
"logged_username",
|
||||
"italic",
|
||||
"policy",
|
||||
"block_policy_inheritance",
|
||||
]
|
||||
depth = 2
|
||||
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
import json
|
||||
import random
|
||||
import subprocess
|
||||
import tempfile
|
||||
import urllib.parse
|
||||
from time import sleep
|
||||
from typing import Union
|
||||
|
||||
@@ -13,21 +11,21 @@ from loguru import logger
|
||||
from packaging import version as pyver
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CoreSettings
|
||||
from core.models import CodeSignToken, CoreSettings
|
||||
from logs.models import PendingAction
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.celery import app
|
||||
from tacticalrmm.utils import run_nats_api_cmd
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
def agent_update(pk: int) -> str:
|
||||
def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str:
|
||||
from agents.utils import get_exegen_url
|
||||
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
|
||||
if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
|
||||
logger.warning(
|
||||
f"{agent.hostname} v{agent.version} is running an unsupported version. Refusing to auto update."
|
||||
)
|
||||
if pyver.parse(agent.version) <= pyver.parse("1.3.0"):
|
||||
return "not supported"
|
||||
|
||||
# skip if we can't determine the arch
|
||||
@@ -37,35 +35,33 @@ def agent_update(pk: int) -> str:
|
||||
)
|
||||
return "noarch"
|
||||
|
||||
# removed sqlite in 1.4.0 to get rid of cgo dependency
|
||||
# 1.3.0 has migration func to move from sqlite to win registry, so force an upgrade to 1.3.0 if old agent
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.3.0"):
|
||||
version = settings.LATEST_AGENT_VER
|
||||
url = agent.winagent_dl
|
||||
inno = agent.win_inno_exe
|
||||
version = settings.LATEST_AGENT_VER
|
||||
inno = agent.win_inno_exe
|
||||
|
||||
if codesigntoken is not None and pyver.parse(version) >= pyver.parse("1.5.0"):
|
||||
base_url = get_exegen_url() + "/api/v1/winagents/?"
|
||||
params = {"version": version, "arch": agent.arch, "token": codesigntoken}
|
||||
url = base_url + urllib.parse.urlencode(params)
|
||||
else:
|
||||
version = "1.3.0"
|
||||
inno = (
|
||||
"winagent-v1.3.0.exe" if agent.arch == "64" else "winagent-v1.3.0-x86.exe"
|
||||
)
|
||||
url = f"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/{inno}"
|
||||
url = agent.winagent_dl
|
||||
|
||||
if agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists():
|
||||
agent.pendingactions.filter(
|
||||
if not force:
|
||||
if agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).delete()
|
||||
).exists():
|
||||
agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).delete()
|
||||
|
||||
PendingAction.objects.create(
|
||||
agent=agent,
|
||||
action_type="agentupdate",
|
||||
details={
|
||||
"url": url,
|
||||
"version": version,
|
||||
"inno": inno,
|
||||
},
|
||||
)
|
||||
PendingAction.objects.create(
|
||||
agent=agent,
|
||||
action_type="agentupdate",
|
||||
details={
|
||||
"url": url,
|
||||
"version": version,
|
||||
"inno": inno,
|
||||
},
|
||||
)
|
||||
|
||||
nats_data = {
|
||||
"func": "agentupdate",
|
||||
@@ -79,12 +75,32 @@ def agent_update(pk: int) -> str:
|
||||
return "created"
|
||||
|
||||
|
||||
@app.task
|
||||
def force_code_sign(pks: list[int]) -> None:
|
||||
try:
|
||||
token = CodeSignToken.objects.first().token
|
||||
except:
|
||||
return
|
||||
|
||||
chunks = (pks[i : i + 50] for i in range(0, len(pks), 50))
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
agent_update(pk=pk, codesigntoken=token, force=True)
|
||||
sleep(0.05)
|
||||
sleep(4)
|
||||
|
||||
|
||||
@app.task
|
||||
def send_agent_update_task(pks: list[int]) -> None:
|
||||
try:
|
||||
codesigntoken = CodeSignToken.objects.first().token
|
||||
except:
|
||||
codesigntoken = None
|
||||
|
||||
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
agent_update(pk)
|
||||
agent_update(pk, codesigntoken)
|
||||
sleep(0.05)
|
||||
sleep(4)
|
||||
|
||||
@@ -95,6 +111,11 @@ def auto_self_agent_update_task() -> None:
|
||||
if not core.agent_auto_update:
|
||||
return
|
||||
|
||||
try:
|
||||
codesigntoken = CodeSignToken.objects.first().token
|
||||
except:
|
||||
codesigntoken = None
|
||||
|
||||
q = Agent.objects.only("pk", "version")
|
||||
pks: list[int] = [
|
||||
i.pk
|
||||
@@ -105,7 +126,7 @@ def auto_self_agent_update_task() -> None:
|
||||
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
agent_update(pk)
|
||||
agent_update(pk, codesigntoken)
|
||||
sleep(0.05)
|
||||
sleep(4)
|
||||
|
||||
@@ -257,30 +278,13 @@ def run_script_email_results_task(
|
||||
logger.error(e)
|
||||
|
||||
|
||||
def _get_nats_config() -> dict:
|
||||
return {
|
||||
"key": settings.SECRET_KEY,
|
||||
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||
}
|
||||
|
||||
|
||||
@app.task
|
||||
def monitor_agents_task() -> None:
|
||||
agents = Agent.objects.only(
|
||||
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
|
||||
)
|
||||
ret = [i.agent_id for i in agents if i.status != "online"]
|
||||
config = _get_nats_config()
|
||||
config["agents"] = ret
|
||||
with tempfile.NamedTemporaryFile() as fp:
|
||||
with open(fp.name, "w") as f:
|
||||
json.dump(config, f)
|
||||
|
||||
cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", "monitor"]
|
||||
try:
|
||||
subprocess.run(cmd, capture_output=True, timeout=30)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
ids = [i.agent_id for i in agents if i.status != "online"]
|
||||
run_nats_api_cmd("monitor", ids)
|
||||
|
||||
|
||||
@app.task
|
||||
@@ -288,15 +292,5 @@ def get_wmi_task() -> None:
|
||||
agents = Agent.objects.only(
|
||||
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
|
||||
)
|
||||
ret = [i.agent_id for i in agents if i.status == "online"]
|
||||
config = _get_nats_config()
|
||||
config["agents"] = ret
|
||||
with tempfile.NamedTemporaryFile() as fp:
|
||||
with open(fp.name, "w") as f:
|
||||
json.dump(config, f)
|
||||
|
||||
cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", "wmi"]
|
||||
try:
|
||||
subprocess.run(cmd, capture_output=True, timeout=30)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
ids = [i.agent_id for i in agents if i.status == "online"]
|
||||
run_nats_api_cmd("wmi", ids)
|
||||
|
||||
@@ -152,8 +152,9 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("time.sleep")
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_ping(self, nats_cmd):
|
||||
def test_ping(self, nats_cmd, mock_sleep):
|
||||
url = f"/agents/{self.agent.pk}/ping/"
|
||||
|
||||
nats_cmd.return_value = "timeout"
|
||||
@@ -914,8 +915,9 @@ class TestAgentTasks(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("agents.utils.get_exegen_url")
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_agent_update(self, nats_cmd):
|
||||
def test_agent_update(self, nats_cmd, get_exe):
|
||||
from agents.tasks import agent_update
|
||||
|
||||
agent_noarch = baker.make_recipe(
|
||||
@@ -926,63 +928,96 @@ class TestAgentTasks(TacticalTestCase):
|
||||
r = agent_update(agent_noarch.pk)
|
||||
self.assertEqual(r, "noarch")
|
||||
|
||||
agent_1111 = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.1.11",
|
||||
)
|
||||
r = agent_update(agent_1111.pk)
|
||||
self.assertEqual(r, "not supported")
|
||||
|
||||
agent64_1112 = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.1.12",
|
||||
)
|
||||
|
||||
r = agent_update(agent64_1112.pk)
|
||||
self.assertEqual(r, "created")
|
||||
action = PendingAction.objects.get(agent__pk=agent64_1112.pk)
|
||||
self.assertEqual(action.action_type, "agentupdate")
|
||||
self.assertEqual(action.status, "pending")
|
||||
self.assertEqual(
|
||||
action.details["url"],
|
||||
"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
|
||||
)
|
||||
self.assertEqual(action.details["inno"], "winagent-v1.3.0.exe")
|
||||
self.assertEqual(action.details["version"], "1.3.0")
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "agentupdate",
|
||||
"payload": {
|
||||
"url": "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
|
||||
"version": "1.3.0",
|
||||
"inno": "winagent-v1.3.0.exe",
|
||||
},
|
||||
},
|
||||
wait=False,
|
||||
)
|
||||
|
||||
agent_64_130 = baker.make_recipe(
|
||||
agent_130 = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.3.0",
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
r = agent_update(agent_64_130.pk)
|
||||
r = agent_update(agent_130.pk)
|
||||
self.assertEqual(r, "not supported")
|
||||
|
||||
# test __without__ code signing
|
||||
agent64_nosign = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.4.14",
|
||||
)
|
||||
|
||||
r = agent_update(agent64_nosign.pk, None)
|
||||
self.assertEqual(r, "created")
|
||||
action = PendingAction.objects.get(agent__pk=agent64_nosign.pk)
|
||||
self.assertEqual(action.action_type, "agentupdate")
|
||||
self.assertEqual(action.status, "pending")
|
||||
self.assertEqual(
|
||||
action.details["url"],
|
||||
f"https://github.com/wh1te909/rmmagent/releases/download/v{settings.LATEST_AGENT_VER}/winagent-v{settings.LATEST_AGENT_VER}.exe",
|
||||
)
|
||||
self.assertEqual(
|
||||
action.details["inno"], f"winagent-v{settings.LATEST_AGENT_VER}.exe"
|
||||
)
|
||||
self.assertEqual(action.details["version"], settings.LATEST_AGENT_VER)
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "agentupdate",
|
||||
"payload": {
|
||||
"url": settings.DL_64,
|
||||
"url": f"https://github.com/wh1te909/rmmagent/releases/download/v{settings.LATEST_AGENT_VER}/winagent-v{settings.LATEST_AGENT_VER}.exe",
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
|
||||
},
|
||||
},
|
||||
wait=False,
|
||||
)
|
||||
action = PendingAction.objects.get(agent__pk=agent_64_130.pk)
|
||||
|
||||
# test __with__ code signing (64 bit)
|
||||
codesign = baker.make("core.CodeSignToken", token="testtoken123")
|
||||
agent64_sign = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.4.14",
|
||||
)
|
||||
|
||||
nats_cmd.return_value = "ok"
|
||||
get_exe.return_value = "https://exe.tacticalrmm.io"
|
||||
r = agent_update(agent64_sign.pk, codesign.token) # type: ignore
|
||||
self.assertEqual(r, "created")
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "agentupdate",
|
||||
"payload": {
|
||||
"url": f"https://exe.tacticalrmm.io/api/v1/winagents/?version={settings.LATEST_AGENT_VER}&arch=64&token=testtoken123", # type: ignore
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
|
||||
},
|
||||
},
|
||||
wait=False,
|
||||
)
|
||||
action = PendingAction.objects.get(agent__pk=agent64_sign.pk)
|
||||
self.assertEqual(action.action_type, "agentupdate")
|
||||
self.assertEqual(action.status, "pending")
|
||||
|
||||
# test __with__ code signing (32 bit)
|
||||
agent32_sign = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 32 bit (build 19041.450)",
|
||||
version="1.4.14",
|
||||
)
|
||||
|
||||
nats_cmd.return_value = "ok"
|
||||
get_exe.return_value = "https://exe.tacticalrmm.io"
|
||||
r = agent_update(agent32_sign.pk, codesign.token) # type: ignore
|
||||
self.assertEqual(r, "created")
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "agentupdate",
|
||||
"payload": {
|
||||
"url": f"https://exe.tacticalrmm.io/api/v1/winagents/?version={settings.LATEST_AGENT_VER}&arch=32&token=testtoken123", # type: ignore
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
"inno": f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe",
|
||||
},
|
||||
},
|
||||
wait=False,
|
||||
)
|
||||
action = PendingAction.objects.get(agent__pk=agent32_sign.pk)
|
||||
self.assertEqual(action.action_type, "agentupdate")
|
||||
self.assertEqual(action.status, "pending")
|
||||
|
||||
|
||||
37
api/tacticalrmm/agents/utils.py
Normal file
@@ -0,0 +1,37 @@
|
||||
import random
|
||||
import urllib.parse
|
||||
|
||||
import requests
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
def get_exegen_url() -> str:
|
||||
urls: list[str] = settings.EXE_GEN_URLS
|
||||
for url in urls:
|
||||
try:
|
||||
r = requests.get(url, timeout=10)
|
||||
except:
|
||||
continue
|
||||
|
||||
if r.status_code == 200:
|
||||
return url
|
||||
|
||||
return random.choice(urls)
|
||||
|
||||
|
||||
def get_winagent_url(arch: str) -> str:
|
||||
from core.models import CodeSignToken
|
||||
|
||||
try:
|
||||
codetoken = CodeSignToken.objects.first().token
|
||||
base_url = get_exegen_url() + "/api/v1/winagents/?"
|
||||
params = {
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
"arch": arch,
|
||||
"token": codetoken,
|
||||
}
|
||||
dl_url = base_url + urllib.parse.urlencode(params)
|
||||
except:
|
||||
dl_url = settings.DL_64 if arch == "64" else settings.DL_32
|
||||
|
||||
return dl_url
|
||||
@@ -3,6 +3,7 @@ import datetime as dt
|
||||
import os
|
||||
import random
|
||||
import string
|
||||
import time
|
||||
|
||||
from django.conf import settings
|
||||
from django.http import HttpResponse
|
||||
@@ -40,7 +41,7 @@ logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
@api_view()
|
||||
def get_agent_versions(request):
|
||||
agents = Agent.objects.only("pk")
|
||||
agents = Agent.objects.prefetch_related("site").only("pk", "hostname")
|
||||
return Response(
|
||||
{
|
||||
"versions": [settings.LATEST_AGENT_VER],
|
||||
@@ -65,9 +66,18 @@ def update_agents(request):
|
||||
def ping(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
status = "offline"
|
||||
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
|
||||
if r == "pong":
|
||||
status = "online"
|
||||
attempts = 0
|
||||
while 1:
|
||||
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=2))
|
||||
if r == "pong":
|
||||
status = "online"
|
||||
break
|
||||
else:
|
||||
attempts += 1
|
||||
time.sleep(1)
|
||||
|
||||
if attempts >= 5:
|
||||
break
|
||||
|
||||
return Response({"name": agent.hostname, "status": status})
|
||||
|
||||
@@ -355,6 +365,8 @@ class Reboot(APIView):
|
||||
def install_agent(request):
|
||||
from knox.models import AuthToken
|
||||
|
||||
from agents.utils import get_winagent_url
|
||||
|
||||
client_id = request.data["client"]
|
||||
site_id = request.data["site"]
|
||||
version = settings.LATEST_AGENT_VER
|
||||
@@ -375,7 +387,7 @@ def install_agent(request):
|
||||
inno = (
|
||||
f"winagent-v{version}.exe" if arch == "64" else f"winagent-v{version}-x86.exe"
|
||||
)
|
||||
download_url = settings.DL_64 if arch == "64" else settings.DL_32
|
||||
download_url = get_winagent_url(arch)
|
||||
|
||||
_, token = AuthToken.objects.create(
|
||||
user=request.user, expiry=dt.timedelta(hours=request.data["expires"])
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from typing import TYPE_CHECKING, Union
|
||||
|
||||
from django.conf import settings
|
||||
@@ -297,7 +298,7 @@ class Alert(models.Model):
|
||||
if alert_template and alert_template.action and not alert.action_run:
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.action.pk,
|
||||
args=alert_template.action_args,
|
||||
args=alert.parse_script_args(alert_template.action_args),
|
||||
timeout=alert_template.action_timeout,
|
||||
wait=True,
|
||||
full=True,
|
||||
@@ -406,7 +407,7 @@ class Alert(models.Model):
|
||||
):
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.resolved_action.pk,
|
||||
args=alert_template.resolved_action_args,
|
||||
args=alert.parse_script_args(alert_template.resolved_action_args),
|
||||
timeout=alert_template.resolved_action_timeout,
|
||||
wait=True,
|
||||
full=True,
|
||||
@@ -428,6 +429,36 @@ class Alert(models.Model):
|
||||
f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname} resolved alert"
|
||||
)
|
||||
|
||||
def parse_script_args(self, args: list[str]):
|
||||
|
||||
if not args:
|
||||
return []
|
||||
|
||||
temp_args = list()
|
||||
# pattern to match for injection
|
||||
pattern = re.compile(".*\\{\\{alert\\.(.*)\\}\\}.*")
|
||||
|
||||
for arg in args:
|
||||
match = pattern.match(arg)
|
||||
if match:
|
||||
name = match.group(1)
|
||||
|
||||
if hasattr(self, name):
|
||||
value = getattr(self, name)
|
||||
else:
|
||||
continue
|
||||
|
||||
try:
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", "'" + value + "'", arg)) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
continue
|
||||
|
||||
else:
|
||||
temp_args.append(arg)
|
||||
|
||||
return temp_args
|
||||
|
||||
|
||||
class AlertTemplate(models.Model):
|
||||
name = models.CharField(max_length=100)
|
||||
|
||||
@@ -5,6 +5,7 @@ from unittest.mock import patch
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker
|
||||
from autotasks.models import AutomatedTask
|
||||
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
@@ -203,3 +204,138 @@ class TestAPIv3(TacticalTestCase):
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.json(), {"mode": "rpc", "shellcmd": ""})
|
||||
reload_nats.assert_called_once()
|
||||
|
||||
def test_task_runner_get(self):
|
||||
from autotasks.serializers import TaskGOGetSerializer
|
||||
|
||||
r = self.client.get("/api/v3/500/asdf9df9dfdf/taskrunner/")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
|
||||
url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/" # type: ignore
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(TaskGOGetSerializer(task).data, r.data) # type: ignore
|
||||
|
||||
def test_task_runner_results(self):
|
||||
from agents.models import AgentCustomField
|
||||
|
||||
r = self.client.patch("/api/v3/500/asdf9df9dfdf/taskrunner/")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
|
||||
url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/" # type: ignore
|
||||
|
||||
# test passing task
|
||||
data = {
|
||||
"stdout": "test test \ntestest stdgsd\n",
|
||||
"stderr": "",
|
||||
"retcode": 0,
|
||||
"execution_time": 3.560,
|
||||
}
|
||||
|
||||
r = self.client.patch(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "passing") # type: ignore
|
||||
|
||||
# test failing task
|
||||
data = {
|
||||
"stdout": "test test \ntestest stdgsd\n",
|
||||
"stderr": "",
|
||||
"retcode": 1,
|
||||
"execution_time": 3.560,
|
||||
}
|
||||
|
||||
r = self.client.patch(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing") # type: ignore
|
||||
|
||||
# test collector task
|
||||
text = baker.make("core.CustomField", model="agent", type="text", name="Test")
|
||||
boolean = baker.make(
|
||||
"core.CustomField", model="agent", type="checkbox", name="Test1"
|
||||
)
|
||||
multiple = baker.make(
|
||||
"core.CustomField", model="agent", type="multiple", name="Test2"
|
||||
)
|
||||
|
||||
# test text fields
|
||||
task.custom_field = text # type: ignore
|
||||
task.save() # type: ignore
|
||||
|
||||
# test failing failing with stderr
|
||||
data = {
|
||||
"stdout": "test test \nthe last line",
|
||||
"stderr": "This is an error",
|
||||
"retcode": 1,
|
||||
"execution_time": 3.560,
|
||||
}
|
||||
|
||||
r = self.client.patch(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing") # type: ignore
|
||||
|
||||
# test saving to text field
|
||||
data = {
|
||||
"stdout": "test test \nthe last line",
|
||||
"stderr": "",
|
||||
"retcode": 0,
|
||||
"execution_time": 3.560,
|
||||
}
|
||||
|
||||
r = self.client.patch(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
|
||||
self.assertEqual(AgentCustomField.objects.get(field=text, agent=task.agent).value, "the last line") # type: ignore
|
||||
|
||||
# test saving to checkbox field
|
||||
task.custom_field = boolean # type: ignore
|
||||
task.save() # type: ignore
|
||||
|
||||
data = {
|
||||
"stdout": "1",
|
||||
"stderr": "",
|
||||
"retcode": 0,
|
||||
"execution_time": 3.560,
|
||||
}
|
||||
|
||||
r = self.client.patch(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
|
||||
self.assertTrue(AgentCustomField.objects.get(field=boolean, agent=task.agent).value) # type: ignore
|
||||
|
||||
# test saving to multiple field with commas
|
||||
task.custom_field = multiple # type: ignore
|
||||
task.save() # type: ignore
|
||||
|
||||
data = {
|
||||
"stdout": "this,is,an,array",
|
||||
"stderr": "",
|
||||
"retcode": 0,
|
||||
"execution_time": 3.560,
|
||||
}
|
||||
|
||||
r = self.client.patch(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
|
||||
self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this", "is", "an", "array"]) # type: ignore
|
||||
|
||||
# test mutiple with a single value
|
||||
data = {
|
||||
"stdout": "this",
|
||||
"stderr": "",
|
||||
"retcode": 0,
|
||||
"execution_time": 3.560,
|
||||
}
|
||||
|
||||
r = self.client.patch(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
|
||||
self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this"]) # type: ignore
|
||||
|
||||
@@ -15,7 +15,7 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.models import User
|
||||
from agents.models import Agent
|
||||
from agents.models import Agent, AgentCustomField
|
||||
from agents.serializers import WinAgentSerializer
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
|
||||
@@ -65,9 +65,17 @@ class CheckIn(APIView):
|
||||
if Alert.objects.filter(agent=agent, resolved=False).exists():
|
||||
Alert.handle_alert_resolve(agent)
|
||||
|
||||
# get any pending actions
|
||||
if agent.pendingactions.filter(status="pending").exists(): # type: ignore
|
||||
agent.handle_pending_actions()
|
||||
# sync scheduled tasks
|
||||
if agent.autotasks.exclude(sync_status="synced").exists(): # type: ignore
|
||||
tasks = agent.autotasks.exclude(sync_status="synced") # type: ignore
|
||||
|
||||
for task in tasks:
|
||||
if task.sync_status == "pendingdeletion":
|
||||
task.delete_task_on_agent()
|
||||
elif task.sync_status == "initial":
|
||||
task.modify_task_on_agent()
|
||||
elif task.sync_status == "notsynced":
|
||||
task.create_task_on_agent()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -351,11 +359,42 @@ class TaskRunner(APIView):
|
||||
instance=task, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_run=djangotime.now())
|
||||
new_task = serializer.save(last_run=djangotime.now())
|
||||
|
||||
status = "failing" if task.retcode != 0 else "passing"
|
||||
# check if task is a collector and update the custom field
|
||||
if task.custom_field:
|
||||
if not task.stderr:
|
||||
|
||||
if AgentCustomField.objects.filter(
|
||||
field=task.custom_field, agent=task.agent
|
||||
).exists():
|
||||
agent_field = AgentCustomField.objects.get(
|
||||
field=task.custom_field, agent=task.agent
|
||||
)
|
||||
else:
|
||||
agent_field = AgentCustomField.objects.create(
|
||||
field=task.custom_field, agent=task.agent
|
||||
)
|
||||
|
||||
# get last line of stdout
|
||||
value = new_task.stdout.split("\n")[-1].strip()
|
||||
|
||||
if task.custom_field.type in ["text", "number", "single", "datetime"]:
|
||||
agent_field.string_value = value
|
||||
agent_field.save()
|
||||
elif task.custom_field.type == "multiple":
|
||||
agent_field.multiple_value = value.split(",")
|
||||
agent_field.save()
|
||||
elif task.custom_field.type == "checkbox":
|
||||
agent_field.bool_value = bool(value)
|
||||
agent_field.save()
|
||||
|
||||
status = "passing"
|
||||
else:
|
||||
status = "failing"
|
||||
else:
|
||||
status = "failing" if task.retcode != 0 else "passing"
|
||||
|
||||
new_task: AutomatedTask = AutomatedTask.objects.get(pk=task.pk)
|
||||
new_task.status = status
|
||||
new_task.save()
|
||||
|
||||
@@ -393,7 +432,7 @@ class SysInfo(APIView):
|
||||
|
||||
|
||||
class MeshExe(APIView):
|
||||
""" Sends the mesh exe to the installer """
|
||||
"""Sends the mesh exe to the installer"""
|
||||
|
||||
def post(self, request):
|
||||
exe = "meshagent.exe" if request.data["arch"] == "64" else "meshagent-x86.exe"
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
from django.db import models
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CoreSettings
|
||||
from django.db import models
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
|
||||
@@ -29,7 +28,8 @@ class Policy(BaseAuditModel):
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_agent_checks_from_policies_task
|
||||
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# get old policy if exists
|
||||
old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
@@ -38,8 +38,8 @@ class Policy(BaseAuditModel):
|
||||
# generate agent checks only if active and enforced were changed
|
||||
if old_policy:
|
||||
if old_policy.active != self.active or old_policy.enforced != self.enforced:
|
||||
generate_agent_checks_from_policies_task.delay(
|
||||
policypk=self.pk,
|
||||
generate_agent_checks_task.delay(
|
||||
policy=self.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -52,7 +52,10 @@ class Policy(BaseAuditModel):
|
||||
agents = list(self.related_agents().only("pk").values_list("pk", flat=True))
|
||||
super(BaseAuditModel, self).delete(*args, **kwargs)
|
||||
|
||||
generate_agent_checks_task.delay(agents, create_tasks=True)
|
||||
generate_agent_checks_task.delay(agents=agents, create_tasks=True)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def is_default_server_policy(self):
|
||||
@@ -62,9 +65,6 @@ class Policy(BaseAuditModel):
|
||||
def is_default_workstation_policy(self):
|
||||
return self.default_workstation_policy.exists() # type: ignore
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def is_agent_excluded(self, agent):
|
||||
return (
|
||||
agent in self.excluded_agents.all()
|
||||
@@ -94,20 +94,29 @@ class Policy(BaseAuditModel):
|
||||
|
||||
filtered_agents_pks = Policy.objects.none()
|
||||
|
||||
filtered_agents_pks |= Agent.objects.filter(
|
||||
site__in=[
|
||||
site
|
||||
for site in explicit_sites
|
||||
if site.client not in explicit_clients
|
||||
and site.client not in self.excluded_clients.all()
|
||||
],
|
||||
monitoring_type=mon_type,
|
||||
).values_list("pk", flat=True)
|
||||
filtered_agents_pks |= (
|
||||
Agent.objects.exclude(block_policy_inheritance=True)
|
||||
.filter(
|
||||
site__in=[
|
||||
site
|
||||
for site in explicit_sites
|
||||
if site.client not in explicit_clients
|
||||
and site.client not in self.excluded_clients.all()
|
||||
],
|
||||
monitoring_type=mon_type,
|
||||
)
|
||||
.values_list("pk", flat=True)
|
||||
)
|
||||
|
||||
filtered_agents_pks |= Agent.objects.filter(
|
||||
site__client__in=[client for client in explicit_clients],
|
||||
monitoring_type=mon_type,
|
||||
).values_list("pk", flat=True)
|
||||
filtered_agents_pks |= (
|
||||
Agent.objects.exclude(block_policy_inheritance=True)
|
||||
.exclude(site__block_policy_inheritance=True)
|
||||
.filter(
|
||||
site__client__in=[client for client in explicit_clients],
|
||||
monitoring_type=mon_type,
|
||||
)
|
||||
.values_list("pk", flat=True)
|
||||
)
|
||||
|
||||
return Agent.objects.filter(
|
||||
models.Q(pk__in=filtered_agents_pks)
|
||||
@@ -123,9 +132,6 @@ class Policy(BaseAuditModel):
|
||||
|
||||
@staticmethod
|
||||
def cascade_policy_tasks(agent):
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
from logs.models import PendingAction
|
||||
|
||||
# List of all tasks to be applied
|
||||
tasks = list()
|
||||
@@ -154,6 +160,17 @@ class Policy(BaseAuditModel):
|
||||
client_policy = client.workstation_policy
|
||||
site_policy = site.workstation_policy
|
||||
|
||||
# check if client/site/agent is blocking inheritance and blank out policies
|
||||
if agent.block_policy_inheritance:
|
||||
site_policy = None
|
||||
client_policy = None
|
||||
default_policy = None
|
||||
elif site.block_policy_inheritance:
|
||||
client_policy = None
|
||||
default_policy = None
|
||||
elif client.block_policy_inheritance:
|
||||
default_policy = None
|
||||
|
||||
if (
|
||||
agent_policy
|
||||
and agent_policy.active
|
||||
@@ -200,26 +217,16 @@ class Policy(BaseAuditModel):
|
||||
if taskpk not in added_task_pks
|
||||
]
|
||||
):
|
||||
delete_win_task_schedule.delay(task.pk)
|
||||
if task.sync_status == "initial":
|
||||
task.delete()
|
||||
else:
|
||||
task.sync_status = "pendingdeletion"
|
||||
task.save()
|
||||
|
||||
# handle matching tasks that haven't synced to agent yet or pending deletion due to agent being offline
|
||||
for action in agent.pendingactions.filter(action_type="taskaction").exclude(
|
||||
status="completed"
|
||||
):
|
||||
task = AutomatedTask.objects.get(pk=action.details["task_id"])
|
||||
if (
|
||||
task.parent_task in agent_tasks_parent_pks
|
||||
and task.parent_task in added_task_pks
|
||||
):
|
||||
agent.remove_matching_pending_task_actions(task.id)
|
||||
|
||||
PendingAction(
|
||||
agent=agent,
|
||||
action_type="taskaction",
|
||||
details={"action": "taskcreate", "task_id": task.id},
|
||||
).save()
|
||||
task.sync_status = "notsynced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
# change tasks from pendingdeletion to notsynced if policy was added or changed
|
||||
agent.autotasks.filter(sync_status="pendingdeletion").filter(
|
||||
parent_task__in=[taskpk for taskpk in added_task_pks]
|
||||
).update(sync_status="notsynced")
|
||||
|
||||
return [task for task in tasks if task.pk not in agent_tasks_parent_pks]
|
||||
|
||||
@@ -251,6 +258,17 @@ class Policy(BaseAuditModel):
|
||||
client_policy = client.workstation_policy
|
||||
site_policy = site.workstation_policy
|
||||
|
||||
# check if client/site/agent is blocking inheritance and blank out policies
|
||||
if agent.block_policy_inheritance:
|
||||
site_policy = None
|
||||
client_policy = None
|
||||
default_policy = None
|
||||
elif site.block_policy_inheritance:
|
||||
client_policy = None
|
||||
default_policy = None
|
||||
elif client.block_policy_inheritance:
|
||||
default_policy = None
|
||||
|
||||
# Used to hold the policies that will be applied and the order in which they are applied
|
||||
# Enforced policies are applied first
|
||||
enforced_checks = list()
|
||||
@@ -412,11 +430,12 @@ class Policy(BaseAuditModel):
|
||||
|
||||
# remove policy checks from agent that fell out of policy scope
|
||||
agent.agentchecks.filter(
|
||||
managed_by_policy=True,
|
||||
parent_check__in=[
|
||||
checkpk
|
||||
for checkpk in agent_checks_parent_pks
|
||||
if checkpk not in [check.pk for check in final_list]
|
||||
]
|
||||
],
|
||||
).delete()
|
||||
|
||||
return [
|
||||
|
||||
@@ -1,169 +1,143 @@
|
||||
from agents.models import Agent
|
||||
from automation.models import Policy
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
from typing import Any, Dict, List, Union
|
||||
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
|
||||
@app.task
|
||||
# generates policy checks on agents affected by a policy and optionally generate automated tasks
|
||||
def generate_agent_checks_from_policies_task(policypk, create_tasks=False):
|
||||
def generate_agent_checks_task(
|
||||
policy: int = None,
|
||||
site: int = None,
|
||||
client: int = None,
|
||||
agents: List[int] = list(),
|
||||
all: bool = False,
|
||||
create_tasks: bool = False,
|
||||
) -> Union[str, None]:
|
||||
from agents.models import Agent
|
||||
|
||||
policy = Policy.objects.get(pk=policypk)
|
||||
from automation.models import Policy
|
||||
|
||||
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
|
||||
elif policy.is_default_server_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="server").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
elif policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="workstation").only(
|
||||
p = Policy.objects.get(pk=policy) if policy else None
|
||||
|
||||
# generate checks on all agents if all is specified or if policy is default server/workstation policy
|
||||
if (p and p.is_default_server_policy and p.is_default_workstation_policy) or all:
|
||||
a = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
|
||||
|
||||
# generate checks on all servers if policy is a default servers policy
|
||||
elif p and p.is_default_server_policy:
|
||||
a = Agent.objects.filter(monitoring_type="server").only("pk", "monitoring_type")
|
||||
|
||||
# generate checks on all workstations if policy is a default workstations policy
|
||||
elif p and p.is_default_workstation_policy:
|
||||
a = Agent.objects.filter(monitoring_type="workstation").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
|
||||
# generate checks on a list of supplied agents
|
||||
elif agents:
|
||||
a = Agent.objects.filter(pk__in=agents)
|
||||
|
||||
# generate checks on agents affected by supplied policy
|
||||
elif policy:
|
||||
a = p.related_agents().only("pk")
|
||||
|
||||
# generate checks that has specified site
|
||||
elif site:
|
||||
a = Agent.objects.filter(site_id=site)
|
||||
|
||||
# generate checks that has specified client
|
||||
elif client:
|
||||
a = Agent.objects.filter(site__client_id=client)
|
||||
else:
|
||||
agents = policy.related_agents().only("pk")
|
||||
a = []
|
||||
|
||||
for agent in agents:
|
||||
for agent in a:
|
||||
agent.generate_checks_from_policies()
|
||||
if create_tasks:
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
# generates policy checks on a list of agents and optionally generate automated tasks
|
||||
def generate_agent_checks_task(agentpks, create_tasks=False):
|
||||
for agent in Agent.objects.filter(pk__in=agentpks):
|
||||
agent.generate_checks_from_policies()
|
||||
|
||||
if create_tasks:
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
# generates policy checks on agent servers or workstations within a certain client or site and optionally generate automated tasks
|
||||
def generate_agent_checks_by_location_task(location, mon_type, create_tasks=False):
|
||||
|
||||
for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
|
||||
agent.generate_checks_from_policies()
|
||||
|
||||
if create_tasks:
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
# generates policy checks on all agent servers or workstations and optionally generate automated tasks
|
||||
def generate_all_agent_checks_task(mon_type, create_tasks=False):
|
||||
for agent in Agent.objects.filter(monitoring_type=mon_type):
|
||||
agent.generate_checks_from_policies()
|
||||
|
||||
if create_tasks:
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
# deletes a policy managed check from all agents
|
||||
def delete_policy_check_task(checkpk):
|
||||
|
||||
Check.objects.filter(parent_check=checkpk).delete()
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
# updates policy managed check fields on agents
|
||||
def update_policy_check_fields_task(checkpk):
|
||||
def update_policy_check_fields_task(check: int) -> str:
|
||||
from checks.models import Check
|
||||
|
||||
check = Check.objects.get(pk=checkpk)
|
||||
c: Check = Check.objects.get(pk=check)
|
||||
update_fields: Dict[Any, Any] = {}
|
||||
|
||||
Check.objects.filter(parent_check=checkpk).update(
|
||||
warning_threshold=check.warning_threshold,
|
||||
error_threshold=check.error_threshold,
|
||||
alert_severity=check.alert_severity,
|
||||
name=check.name,
|
||||
run_interval=check.run_interval,
|
||||
disk=check.disk,
|
||||
fails_b4_alert=check.fails_b4_alert,
|
||||
ip=check.ip,
|
||||
script=check.script,
|
||||
script_args=check.script_args,
|
||||
info_return_codes=check.info_return_codes,
|
||||
warning_return_codes=check.warning_return_codes,
|
||||
timeout=check.timeout,
|
||||
pass_if_start_pending=check.pass_if_start_pending,
|
||||
pass_if_svc_not_exist=check.pass_if_svc_not_exist,
|
||||
restart_if_stopped=check.restart_if_stopped,
|
||||
log_name=check.log_name,
|
||||
event_id=check.event_id,
|
||||
event_id_is_wildcard=check.event_id_is_wildcard,
|
||||
event_type=check.event_type,
|
||||
event_source=check.event_source,
|
||||
event_message=check.event_message,
|
||||
fail_when=check.fail_when,
|
||||
search_last_days=check.search_last_days,
|
||||
number_of_events_b4_alert=check.number_of_events_b4_alert,
|
||||
email_alert=check.email_alert,
|
||||
text_alert=check.text_alert,
|
||||
dashboard_alert=check.dashboard_alert,
|
||||
)
|
||||
for field in c.policy_fields_to_copy:
|
||||
update_fields[field] = getattr(c, field)
|
||||
|
||||
Check.objects.filter(parent_check=check).update(**update_fields)
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
# generates policy tasks on agents affected by a policy
|
||||
def generate_agent_tasks_from_policies_task(policypk):
|
||||
def generate_agent_autotasks_task(policy: int = None) -> str:
|
||||
from agents.models import Agent
|
||||
|
||||
policy = Policy.objects.get(pk=policypk)
|
||||
from automation.models import Policy
|
||||
|
||||
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
||||
p: Policy = Policy.objects.get(pk=policy)
|
||||
|
||||
if p and p.is_default_server_policy and p.is_default_workstation_policy:
|
||||
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
|
||||
elif policy.is_default_server_policy:
|
||||
elif p and p.is_default_server_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="server").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
elif policy.is_default_workstation_policy:
|
||||
elif p and p.is_default_workstation_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="workstation").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
else:
|
||||
agents = policy.related_agents().only("pk")
|
||||
agents = p.related_agents().only("pk")
|
||||
|
||||
for agent in agents:
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def delete_policy_autotask_task(taskpk):
|
||||
def delete_policy_autotasks_task(task: int) -> str:
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
|
||||
for task in AutomatedTask.objects.filter(parent_task=taskpk):
|
||||
delete_win_task_schedule.delay(task.pk)
|
||||
for t in AutomatedTask.objects.filter(parent_task=task):
|
||||
t.delete_task_on_agent()
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def run_win_policy_autotask_task(task_pks):
|
||||
from autotasks.tasks import run_win_task
|
||||
def run_win_policy_autotasks_task(task: int) -> str:
|
||||
from autotasks.models import AutomatedTask
|
||||
|
||||
for task in task_pks:
|
||||
run_win_task.delay(task)
|
||||
for t in AutomatedTask.objects.filter(parent_task=task):
|
||||
t.run_win_task()
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def update_policy_task_fields_task(taskpk, update_agent=False):
|
||||
from autotasks.tasks import enable_or_disable_win_task
|
||||
def update_policy_autotasks_fields_task(task: int, update_agent: bool = False) -> str:
|
||||
from autotasks.models import AutomatedTask
|
||||
|
||||
task = AutomatedTask.objects.get(pk=taskpk)
|
||||
t = AutomatedTask.objects.get(pk=task)
|
||||
update_fields: Dict[str, Any] = {}
|
||||
|
||||
AutomatedTask.objects.filter(parent_task=taskpk).update(
|
||||
alert_severity=task.alert_severity,
|
||||
email_alert=task.email_alert,
|
||||
text_alert=task.text_alert,
|
||||
dashboard_alert=task.dashboard_alert,
|
||||
script=task.script,
|
||||
script_args=task.script_args,
|
||||
name=task.name,
|
||||
timeout=task.timeout,
|
||||
enabled=task.enabled,
|
||||
)
|
||||
for field in t.policy_fields_to_copy:
|
||||
update_fields[field] = getattr(t, field)
|
||||
|
||||
AutomatedTask.objects.filter(parent_task=task).update(**update_fields)
|
||||
|
||||
if update_agent:
|
||||
for task in AutomatedTask.objects.filter(parent_task=taskpk):
|
||||
enable_or_disable_win_task.delay(task.pk, task.enabled)
|
||||
for t in AutomatedTask.objects.filter(parent_task=task).exclude(
|
||||
sync_status="initial"
|
||||
):
|
||||
t.modify_task_on_agent()
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -52,7 +52,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_policy(self):
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
def test_add_policy(self, create_task):
|
||||
url = "/automation/policies/"
|
||||
|
||||
data = {
|
||||
@@ -90,8 +91,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("automation.tasks.generate_agent_checks_from_policies_task.delay")
|
||||
def test_update_policy(self, generate_agent_checks_from_policies_task):
|
||||
@patch("automation.tasks.generate_agent_checks_task.delay")
|
||||
def test_update_policy(self, generate_agent_checks_task):
|
||||
# returns 404 for invalid policy pk
|
||||
resp = self.client.put("/automation/policies/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
@@ -110,7 +111,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# only called if active or enforced are updated
|
||||
generate_agent_checks_from_policies_task.assert_not_called()
|
||||
generate_agent_checks_task.assert_not_called()
|
||||
|
||||
data = {
|
||||
"name": "Test Policy Update",
|
||||
@@ -121,8 +122,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
generate_agent_checks_from_policies_task.assert_called_with(
|
||||
policypk=policy.pk, create_tasks=True # type: ignore
|
||||
generate_agent_checks_task.assert_called_with(
|
||||
policy=policy.pk, create_tasks=True # type: ignore
|
||||
)
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
@@ -145,7 +146,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
generate_agent_checks_task.assert_called_with(
|
||||
[agent.pk for agent in agents], create_tasks=True
|
||||
agents=[agent.pk for agent in agents], create_tasks=True
|
||||
)
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
@@ -271,7 +272,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@patch("automation.tasks.run_win_policy_autotask_task.delay")
|
||||
@patch("automation.tasks.run_win_policy_autotasks_task.delay")
|
||||
def test_run_win_task(self, mock_task):
|
||||
|
||||
# create managed policy tasks
|
||||
@@ -281,11 +282,12 @@ class TestPolicyViews(TacticalTestCase):
|
||||
parent_task=1,
|
||||
_quantity=6,
|
||||
)
|
||||
|
||||
url = "/automation/runwintask/1/"
|
||||
resp = self.client.put(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
mock_task.assert_called_once_with([task.pk for task in tasks]) # type: ignore
|
||||
mock_task.assert_called() # type: ignore
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@@ -426,7 +428,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@patch("automation.tasks.generate_agent_checks_from_policies_task.delay")
|
||||
@patch("automation.tasks.generate_agent_checks_task.delay")
|
||||
def test_sync_policy(self, generate_checks):
|
||||
url = "/automation/sync/"
|
||||
|
||||
@@ -441,7 +443,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
generate_checks.assert_called_with(policy.pk, create_tasks=True) # type: ignore
|
||||
generate_checks.assert_called_with(policy=policy.pk, create_tasks=True) # type: ignore
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@@ -497,7 +499,7 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
self.assertEquals(len(resp.data["agents"]), 10) # type: ignore
|
||||
|
||||
def test_generating_agent_policy_checks(self):
|
||||
from .tasks import generate_agent_checks_from_policies_task
|
||||
from .tasks import generate_agent_checks_task
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
@@ -505,7 +507,7 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
agent = baker.make_recipe("agents.agent", policy=policy)
|
||||
|
||||
# test policy assigned to agent
|
||||
generate_agent_checks_from_policies_task(policy.id) # type: ignore
|
||||
generate_agent_checks_task(policy=policy.id) # type: ignore
|
||||
|
||||
# make sure all checks were created. should be 7
|
||||
agent_checks = Agent.objects.get(pk=agent.id).agentchecks.all()
|
||||
@@ -545,7 +547,7 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
self.assertEqual(check.event_type, checks[6].event_type)
|
||||
|
||||
def test_generating_agent_policy_checks_with_enforced(self):
|
||||
from .tasks import generate_agent_checks_from_policies_task
|
||||
from .tasks import generate_agent_checks_task
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True, enforced=True)
|
||||
@@ -555,7 +557,7 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
agent = baker.make_recipe("agents.agent", site=site, policy=policy)
|
||||
self.create_checks(agent=agent, script=script)
|
||||
|
||||
generate_agent_checks_from_policies_task(policy.id, create_tasks=True) # type: ignore
|
||||
generate_agent_checks_task(policy=policy.id, create_tasks=True) # type: ignore
|
||||
|
||||
# make sure each agent check says overriden_by_policy
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 14)
|
||||
@@ -566,13 +568,12 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
7,
|
||||
)
|
||||
|
||||
@patch("automation.tasks.generate_agent_checks_by_location_task.delay")
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
@patch("automation.tasks.generate_agent_checks_task.delay")
|
||||
def test_generating_agent_policy_checks_by_location(
|
||||
self, generate_agent_checks_by_location_task
|
||||
self, generate_agent_checks_mock, create_task
|
||||
):
|
||||
from automation.tasks import (
|
||||
generate_agent_checks_by_location_task as generate_agent_checks,
|
||||
)
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
@@ -596,16 +597,14 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
workstation_agent.client.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site__client_id": workstation_agent.client.pk},
|
||||
mon_type="workstation",
|
||||
generate_agent_checks_mock.assert_called_with(
|
||||
client=workstation_agent.client.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
generate_agent_checks_mock.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site__client_id": workstation_agent.client.pk},
|
||||
mon_type="workstation",
|
||||
generate_agent_checks_task(
|
||||
client=workstation_agent.client.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -620,16 +619,14 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
workstation_agent.client.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site__client_id": workstation_agent.client.pk},
|
||||
mon_type="workstation",
|
||||
generate_agent_checks_mock.assert_called_with(
|
||||
client=workstation_agent.client.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
generate_agent_checks_mock.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site__client_id": workstation_agent.client.pk},
|
||||
mon_type="workstation",
|
||||
generate_agent_checks_task(
|
||||
client=workstation_agent.client.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -644,16 +641,14 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
server_agent.client.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site__client_id": server_agent.client.pk},
|
||||
mon_type="server",
|
||||
generate_agent_checks_mock.assert_called_with(
|
||||
client=server_agent.client.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
generate_agent_checks_mock.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site__client_id": server_agent.client.pk},
|
||||
mon_type="server",
|
||||
generate_agent_checks_task(
|
||||
client=server_agent.client.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -668,16 +663,14 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
server_agent.client.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site__client_id": server_agent.client.pk},
|
||||
mon_type="server",
|
||||
generate_agent_checks_mock.assert_called_with(
|
||||
client=server_agent.client.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
generate_agent_checks_mock.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site__client_id": server_agent.client.pk},
|
||||
mon_type="server",
|
||||
generate_agent_checks_task(
|
||||
client=server_agent.client.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -692,16 +685,14 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
workstation_agent.site.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site_id": workstation_agent.site.pk},
|
||||
mon_type="workstation",
|
||||
generate_agent_checks_mock.assert_called_with(
|
||||
site=workstation_agent.site.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
generate_agent_checks_mock.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site_id": workstation_agent.site.pk},
|
||||
mon_type="workstation",
|
||||
generate_agent_checks_task(
|
||||
site=workstation_agent.site.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -716,16 +707,14 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
workstation_agent.site.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site_id": workstation_agent.site.pk},
|
||||
mon_type="workstation",
|
||||
generate_agent_checks_mock.assert_called_with(
|
||||
site=workstation_agent.site.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
generate_agent_checks_mock.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site_id": workstation_agent.site.pk},
|
||||
mon_type="workstation",
|
||||
generate_agent_checks_task(
|
||||
site=workstation_agent.site.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -740,16 +729,14 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
server_agent.site.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site_id": server_agent.site.pk},
|
||||
mon_type="server",
|
||||
generate_agent_checks_mock.assert_called_with(
|
||||
site=server_agent.site.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
generate_agent_checks_mock.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site_id": server_agent.site.pk},
|
||||
mon_type="server",
|
||||
generate_agent_checks_task(
|
||||
site=server_agent.site.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -764,16 +751,14 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
server_agent.site.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site_id": server_agent.site.pk},
|
||||
mon_type="server",
|
||||
generate_agent_checks_mock.assert_called_with(
|
||||
site=server_agent.site.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
generate_agent_checks_mock.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site_id": server_agent.site.pk},
|
||||
mon_type="server",
|
||||
generate_agent_checks_task(
|
||||
site=server_agent.site.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -783,13 +768,10 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||
)
|
||||
|
||||
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||
def test_generating_policy_checks_for_all_agents(
|
||||
self, generate_all_agent_checks_task
|
||||
):
|
||||
@patch("automation.tasks.generate_agent_checks_task.delay")
|
||||
def test_generating_policy_checks_for_all_agents(self, generate_agent_checks_mock):
|
||||
from core.models import CoreSettings
|
||||
|
||||
from .tasks import generate_all_agent_checks_task as generate_all_checks
|
||||
from .tasks import generate_agent_checks_task
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
@@ -801,11 +783,9 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
core.server_policy = policy
|
||||
core.save()
|
||||
|
||||
generate_all_agent_checks_task.assert_called_with(
|
||||
mon_type="server", create_tasks=True
|
||||
)
|
||||
generate_all_agent_checks_task.reset_mock()
|
||||
generate_all_checks(mon_type="server", create_tasks=True)
|
||||
generate_agent_checks_mock.assert_called_with(all=True, create_tasks=True)
|
||||
generate_agent_checks_mock.reset_mock()
|
||||
generate_agent_checks_task(all=True, create_tasks=True)
|
||||
|
||||
# all servers should have 7 checks
|
||||
for agent in server_agents:
|
||||
@@ -818,15 +798,9 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
core.workstation_policy = policy
|
||||
core.save()
|
||||
|
||||
generate_all_agent_checks_task.assert_any_call(
|
||||
mon_type="workstation", create_tasks=True
|
||||
)
|
||||
generate_all_agent_checks_task.assert_any_call(
|
||||
mon_type="server", create_tasks=True
|
||||
)
|
||||
generate_all_agent_checks_task.reset_mock()
|
||||
generate_all_checks(mon_type="server", create_tasks=True)
|
||||
generate_all_checks(mon_type="workstation", create_tasks=True)
|
||||
generate_agent_checks_mock.assert_any_call(all=True, create_tasks=True)
|
||||
generate_agent_checks_mock.reset_mock()
|
||||
generate_agent_checks_task(all=True, create_tasks=True)
|
||||
|
||||
# all workstations should have 7 checks
|
||||
for agent in server_agents:
|
||||
@@ -838,11 +812,9 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
core.workstation_policy = None
|
||||
core.save()
|
||||
|
||||
generate_all_agent_checks_task.assert_called_with(
|
||||
mon_type="workstation", create_tasks=True
|
||||
)
|
||||
generate_all_agent_checks_task.reset_mock()
|
||||
generate_all_checks(mon_type="workstation", create_tasks=True)
|
||||
generate_agent_checks_mock.assert_called_with(all=True, create_tasks=True)
|
||||
generate_agent_checks_mock.reset_mock()
|
||||
generate_agent_checks_task(all=True, create_tasks=True)
|
||||
|
||||
# nothing should have the checks
|
||||
for agent in server_agents:
|
||||
@@ -851,31 +823,8 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
for agent in workstation_agents:
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)
|
||||
|
||||
def test_delete_policy_check(self):
|
||||
from .models import Policy
|
||||
from .tasks import delete_policy_check_task
|
||||
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
self.create_checks(policy=policy)
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
# make sure agent has 7 checks
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
||||
|
||||
# pick a policy check and delete it from the agent
|
||||
policy_check_id = Policy.objects.get(pk=policy.id).policychecks.first().id # type: ignore
|
||||
|
||||
delete_policy_check_task(policy_check_id)
|
||||
|
||||
# make sure policy check doesn't exist on agent
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 6)
|
||||
self.assertFalse(
|
||||
Agent.objects.get(pk=agent.id)
|
||||
.agentchecks.filter(parent_check=policy_check_id)
|
||||
.exists()
|
||||
)
|
||||
|
||||
def update_policy_check_fields(self):
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
def update_policy_check_fields(self, create_task):
|
||||
from .models import Policy
|
||||
from .tasks import update_policy_check_fields_task
|
||||
|
||||
@@ -905,8 +854,9 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
"12.12.12.12",
|
||||
)
|
||||
|
||||
def test_generate_agent_tasks(self):
|
||||
from .tasks import generate_agent_tasks_from_policies_task
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
def test_generate_agent_tasks(self, create_task):
|
||||
from .tasks import generate_agent_autotasks_task
|
||||
|
||||
# create test data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
@@ -915,7 +865,7 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
)
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
generate_agent_tasks_from_policies_task(policy.id) # type: ignore
|
||||
generate_agent_autotasks_task(policy=policy.id) # type: ignore
|
||||
|
||||
agent_tasks = Agent.objects.get(pk=agent.id).autotasks.all()
|
||||
|
||||
@@ -934,56 +884,61 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
self.assertEqual(task.parent_task, tasks[2].id) # type: ignore
|
||||
self.assertEqual(task.name, tasks[2].name) # type: ignore
|
||||
|
||||
@patch("autotasks.tasks.delete_win_task_schedule.delay")
|
||||
def test_delete_policy_tasks(self, delete_win_task_schedule):
|
||||
from .tasks import delete_policy_autotask_task
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
@patch("autotasks.models.AutomatedTask.delete_task_on_agent")
|
||||
def test_delete_policy_tasks(self, delete_task_on_agent, create_task):
|
||||
from .tasks import delete_policy_autotasks_task
|
||||
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
delete_policy_autotask_task(tasks[0].id) # type: ignore
|
||||
delete_policy_autotasks_task(task=tasks[0].id) # type: ignore
|
||||
|
||||
delete_win_task_schedule.assert_called_with(
|
||||
agent.autotasks.get(parent_task=tasks[0].id).id # type: ignore
|
||||
)
|
||||
delete_task_on_agent.assert_called()
|
||||
|
||||
@patch("autotasks.tasks.run_win_task.delay")
|
||||
def test_run_policy_task(self, run_win_task):
|
||||
from .tasks import run_win_policy_autotask_task
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
@patch("autotasks.models.AutomatedTask.run_win_task")
|
||||
def test_run_policy_task(self, run_win_task, create_task):
|
||||
from .tasks import run_win_policy_autotasks_task
|
||||
|
||||
tasks = baker.make("autotasks.AutomatedTask", _quantity=3)
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
run_win_policy_autotask_task([task.id for task in tasks]) # type: ignore
|
||||
run_win_policy_autotasks_task(task=tasks[0].id) # type: ignore
|
||||
|
||||
run_win_task.side_effect = [task.id for task in tasks] # type: ignore
|
||||
self.assertEqual(run_win_task.call_count, 3)
|
||||
for task in tasks: # type: ignore
|
||||
run_win_task.assert_any_call(task.id) # type: ignore
|
||||
run_win_task.assert_called_once()
|
||||
|
||||
@patch("autotasks.tasks.enable_or_disable_win_task.delay")
|
||||
def test_update_policy_tasks(self, enable_or_disable_win_task):
|
||||
from .tasks import update_policy_task_fields_task
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
@patch("autotasks.models.AutomatedTask.modify_task_on_agent")
|
||||
def test_update_policy_tasks(self, modify_task_on_agent, create_task):
|
||||
from .tasks import update_policy_autotasks_fields_task
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
tasks = baker.make(
|
||||
"autotasks.AutomatedTask", enabled=True, policy=policy, _quantity=3
|
||||
"autotasks.AutomatedTask",
|
||||
enabled=True,
|
||||
policy=policy,
|
||||
_quantity=3,
|
||||
)
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
tasks[0].enabled = False # type: ignore
|
||||
tasks[0].save() # type: ignore
|
||||
|
||||
update_policy_task_fields_task(tasks[0].id) # type: ignore
|
||||
enable_or_disable_win_task.assert_not_called()
|
||||
update_policy_autotasks_fields_task(task=tasks[0].id) # type: ignore
|
||||
modify_task_on_agent.assert_not_called()
|
||||
|
||||
self.assertFalse(agent.autotasks.get(parent_task=tasks[0].id).enabled) # type: ignore
|
||||
|
||||
update_policy_task_fields_task(tasks[0].id, update_agent=True) # type: ignore
|
||||
enable_or_disable_win_task.assert_called_with(
|
||||
agent.autotasks.get(parent_task=tasks[0].id).id, False # type: ignore
|
||||
)
|
||||
update_policy_autotasks_fields_task(task=tasks[0].id, update_agent=True) # type: ignore
|
||||
modify_task_on_agent.assert_not_called()
|
||||
|
||||
agent.autotasks.update(sync_status="synced")
|
||||
update_policy_autotasks_fields_task(task=tasks[0].id, update_agent=True) # type: ignore
|
||||
modify_task_on_agent.assert_called_once()
|
||||
|
||||
@patch("agents.models.Agent.generate_tasks_from_policies")
|
||||
@patch("agents.models.Agent.generate_checks_from_policies")
|
||||
@@ -996,17 +951,19 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
generate_checks.reset_mock()
|
||||
generate_tasks.reset_mock()
|
||||
|
||||
generate_agent_checks_task([agent.pk for agent in agents])
|
||||
generate_agent_checks_task(agents=[agent.pk for agent in agents])
|
||||
self.assertEquals(generate_checks.call_count, 5)
|
||||
generate_tasks.assert_not_called()
|
||||
generate_checks.reset_mock()
|
||||
|
||||
generate_agent_checks_task([agent.pk for agent in agents], create_tasks=True)
|
||||
generate_agent_checks_task(
|
||||
agents=[agent.pk for agent in agents], create_tasks=True
|
||||
)
|
||||
self.assertEquals(generate_checks.call_count, 5)
|
||||
self.assertEquals(generate_checks.call_count, 5)
|
||||
|
||||
@patch("autotasks.tasks.delete_win_task_schedule.delay")
|
||||
def test_policy_exclusions(self, delete_task):
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
def test_policy_exclusions(self, create_task):
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
baker.make_recipe("checks.memory_check", policy=policy)
|
||||
@@ -1028,8 +985,6 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
self.assertEqual(policy.related_agents().count(), 0) # type: ignore
|
||||
self.assertEqual(agent.agentchecks.count(), 0) # type: ignore
|
||||
delete_task.assert_called()
|
||||
delete_task.reset_mock()
|
||||
|
||||
# delete agent tasks
|
||||
agent.autotasks.all().delete()
|
||||
@@ -1051,8 +1006,6 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
self.assertEqual(policy.related_agents().count(), 0) # type: ignore
|
||||
self.assertEqual(agent.agentchecks.count(), 0) # type: ignore
|
||||
delete_task.assert_called()
|
||||
delete_task.reset_mock()
|
||||
|
||||
# delete agent tasks and reset
|
||||
agent.autotasks.all().delete()
|
||||
@@ -1074,8 +1027,6 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
self.assertEqual(policy.related_agents().count(), 0) # type: ignore
|
||||
self.assertEqual(agent.agentchecks.count(), 0) # type: ignore
|
||||
delete_task.assert_called()
|
||||
delete_task.reset_mock()
|
||||
|
||||
# delete agent tasks and reset
|
||||
agent.autotasks.all().delete()
|
||||
@@ -1103,11 +1054,82 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
self.assertEqual(policy.related_agents().count(), 0) # type: ignore
|
||||
self.assertEqual(agent.agentchecks.count(), 0) # type: ignore
|
||||
delete_task.assert_called()
|
||||
delete_task.reset_mock()
|
||||
|
||||
def test_removing_duplicate_pending_task_actions(self):
|
||||
pass
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
def test_policy_inheritance_blocking(self, create_task):
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
baker.make_recipe("checks.memory_check", policy=policy)
|
||||
baker.make("autotasks.AutomatedTask", policy=policy)
|
||||
agent = baker.make_recipe("agents.agent", monitoring_type="server")
|
||||
|
||||
def test_creating_checks_with_assigned_tasks(self):
|
||||
pass
|
||||
core = CoreSettings.objects.first()
|
||||
core.server_policy = policy
|
||||
core.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
# should get policies from default policy
|
||||
self.assertTrue(agent.autotasks.all())
|
||||
self.assertTrue(agent.agentchecks.all())
|
||||
|
||||
# test client blocking inheritance
|
||||
agent.site.client.block_policy_inheritance = True
|
||||
agent.site.client.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
self.assertFalse(agent.autotasks.all())
|
||||
self.assertFalse(agent.agentchecks.all())
|
||||
|
||||
agent.site.client.server_policy = policy
|
||||
agent.site.client.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
# should get policies from client policy
|
||||
self.assertTrue(agent.autotasks.all())
|
||||
self.assertTrue(agent.agentchecks.all())
|
||||
|
||||
# test site blocking inheritance
|
||||
agent.site.block_policy_inheritance = True
|
||||
agent.site.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
self.assertFalse(agent.autotasks.all())
|
||||
self.assertFalse(agent.agentchecks.all())
|
||||
|
||||
agent.site.server_policy = policy
|
||||
agent.site.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
# should get policies from site policy
|
||||
self.assertTrue(agent.autotasks.all())
|
||||
self.assertTrue(agent.agentchecks.all())
|
||||
|
||||
# test agent blocking inheritance
|
||||
agent.block_policy_inheritance = True
|
||||
agent.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
self.assertFalse(agent.autotasks.all())
|
||||
self.assertFalse(agent.agentchecks.all())
|
||||
|
||||
agent.policy = policy
|
||||
agent.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
# should get policies from agent policy
|
||||
self.assertTrue(agent.autotasks.all())
|
||||
self.assertTrue(agent.agentchecks.all())
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from agents.serializers import AgentHostnameSerializer
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
from clients.models import Client
|
||||
from clients.serializers import ClientSerializer, SiteSerializer
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from tacticalrmm.utils import notify_error
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
@@ -22,7 +21,6 @@ from .serializers import (
|
||||
PolicyTableSerializer,
|
||||
PolicyTaskStatusSerializer,
|
||||
)
|
||||
from .tasks import run_win_policy_autotask_task
|
||||
|
||||
|
||||
class GetAddPolicies(APIView):
|
||||
@@ -76,10 +74,10 @@ class GetUpdateDeletePolicy(APIView):
|
||||
class PolicySync(APIView):
|
||||
def post(self, request):
|
||||
if "policy" in request.data.keys():
|
||||
from automation.tasks import generate_agent_checks_from_policies_task
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
generate_agent_checks_from_policies_task.delay(
|
||||
request.data["policy"], create_tasks=True
|
||||
generate_agent_checks_task.delay(
|
||||
policy=request.data["policy"], create_tasks=True
|
||||
)
|
||||
return Response("ok")
|
||||
|
||||
@@ -101,8 +99,9 @@ class PolicyAutoTask(APIView):
|
||||
|
||||
# bulk run win tasks associated with policy
|
||||
def put(self, request, task):
|
||||
tasks = AutomatedTask.objects.filter(parent_task=task)
|
||||
run_win_policy_autotask_task.delay([task.id for task in tasks])
|
||||
from .tasks import run_win_policy_autotasks_task
|
||||
|
||||
run_win_policy_autotasks_task.delay(task=task)
|
||||
return Response("Affected agent tasks will run shortly")
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,31 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-04 00:32
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0019_globalkvstore'),
|
||||
('scripts', '0007_script_args'),
|
||||
('autotasks', '0018_automatedtask_run_asap_after_missed'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='custom_field',
|
||||
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='autotask', to='core.customfield'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='retvalue',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='script',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='autoscript', to='scripts.script'),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-21 02:26
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0019_auto_20210404_0032'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='sync_status',
|
||||
field=models.CharField(choices=[('synced', 'Synced With Agent'), ('notsynced', 'Waiting On Agent Checkin'), ('pendingdeletion', 'Pending Deletion on Agent'), ('initial', 'Initial Task Sync')], default='initial', max_length=100),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,20 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-27 14:11
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0021_customfield_hide_in_ui'),
|
||||
('autotasks', '0020_auto_20210421_0226'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='custom_field',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='autotasks', to='core.customfield'),
|
||||
),
|
||||
]
|
||||
@@ -1,16 +1,19 @@
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
import random
|
||||
import string
|
||||
from typing import List
|
||||
|
||||
import pytz
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.db.models.fields import DateTimeField
|
||||
from loguru import logger
|
||||
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from django.utils import timezone as djangotime
|
||||
from logs.models import BaseAuditModel
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from tacticalrmm.utils import bitdays_to_string
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
@@ -36,6 +39,7 @@ SYNC_STATUS_CHOICES = [
|
||||
("synced", "Synced With Agent"),
|
||||
("notsynced", "Waiting On Agent Checkin"),
|
||||
("pendingdeletion", "Pending Deletion on Agent"),
|
||||
("initial", "Initial Task Sync"),
|
||||
]
|
||||
|
||||
TASK_STATUS_CHOICES = [
|
||||
@@ -60,12 +64,19 @@ class AutomatedTask(BaseAuditModel):
|
||||
blank=True,
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
custom_field = models.ForeignKey(
|
||||
"core.CustomField",
|
||||
related_name="autotasks",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
script = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="autoscript",
|
||||
on_delete=models.CASCADE,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
script_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
@@ -100,6 +111,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
parent_task = models.PositiveIntegerField(null=True, blank=True)
|
||||
win_task_name = models.CharField(max_length=255, null=True, blank=True)
|
||||
timeout = models.PositiveIntegerField(default=120)
|
||||
retvalue = models.TextField(null=True, blank=True)
|
||||
retcode = models.IntegerField(null=True, blank=True)
|
||||
stdout = models.TextField(null=True, blank=True)
|
||||
stderr = models.TextField(null=True, blank=True)
|
||||
@@ -110,7 +122,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
max_length=30, choices=TASK_STATUS_CHOICES, default="pending"
|
||||
)
|
||||
sync_status = models.CharField(
|
||||
max_length=100, choices=SYNC_STATUS_CHOICES, default="notsynced"
|
||||
max_length=100, choices=SYNC_STATUS_CHOICES, default="initial"
|
||||
)
|
||||
alert_severity = models.CharField(
|
||||
max_length=30, choices=SEVERITY_CHOICES, default="info"
|
||||
@@ -147,6 +159,31 @@ class AutomatedTask(BaseAuditModel):
|
||||
|
||||
return self.last_run
|
||||
|
||||
# These fields will be duplicated on the agent tasks that are managed by a policy
|
||||
@property
|
||||
def policy_fields_to_copy(self) -> List[str]:
|
||||
return [
|
||||
"alert_severity",
|
||||
"email_alert",
|
||||
"text_alert",
|
||||
"dashboard_alert",
|
||||
"script",
|
||||
"script_args",
|
||||
"assigned_check",
|
||||
"name",
|
||||
"run_time_days",
|
||||
"run_time_minute",
|
||||
"run_time_bit_weekdays",
|
||||
"run_time_date",
|
||||
"task_type",
|
||||
"win_task_name",
|
||||
"timeout",
|
||||
"enabled",
|
||||
"remove_if_not_scheduled",
|
||||
"run_asap_after_missed",
|
||||
"custom_field",
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def generate_task_name():
|
||||
chars = string.ascii_letters
|
||||
@@ -160,7 +197,6 @@ class AutomatedTask(BaseAuditModel):
|
||||
return TaskSerializer(task).data
|
||||
|
||||
def create_policy_task(self, agent=None, policy=None):
|
||||
from .tasks import create_win_task_schedule
|
||||
|
||||
# if policy is present, then this task is being copied to another policy
|
||||
# if agent is present, then this task is being created on an agent from a policy
|
||||
@@ -177,15 +213,6 @@ class AutomatedTask(BaseAuditModel):
|
||||
assigned_check = agent.agentchecks.filter(
|
||||
parent_check=self.assigned_check.pk
|
||||
).first()
|
||||
# check was overriden by agent and we need to use that agents check
|
||||
else:
|
||||
if agent.agentchecks.filter(
|
||||
check_type=self.assigned_check.check_type, overriden_by_policy=True
|
||||
).exists():
|
||||
assigned_check = agent.agentchecks.filter(
|
||||
check_type=self.assigned_check.check_type,
|
||||
overriden_by_policy=True,
|
||||
).first()
|
||||
elif policy and self.assigned_check:
|
||||
if policy.policychecks.filter(name=self.assigned_check.name).exists():
|
||||
assigned_check = policy.policychecks.filter(
|
||||
@@ -201,27 +228,175 @@ class AutomatedTask(BaseAuditModel):
|
||||
policy=policy,
|
||||
managed_by_policy=bool(agent),
|
||||
parent_task=(self.pk if agent else None),
|
||||
alert_severity=self.alert_severity,
|
||||
email_alert=self.email_alert,
|
||||
text_alert=self.text_alert,
|
||||
dashboard_alert=self.dashboard_alert,
|
||||
script=self.script,
|
||||
script_args=self.script_args,
|
||||
assigned_check=assigned_check,
|
||||
name=self.name,
|
||||
run_time_days=self.run_time_days,
|
||||
run_time_minute=self.run_time_minute,
|
||||
run_time_bit_weekdays=self.run_time_bit_weekdays,
|
||||
run_time_date=self.run_time_date,
|
||||
task_type=self.task_type,
|
||||
win_task_name=self.win_task_name,
|
||||
timeout=self.timeout,
|
||||
enabled=self.enabled,
|
||||
remove_if_not_scheduled=self.remove_if_not_scheduled,
|
||||
run_asap_after_missed=self.run_asap_after_missed,
|
||||
)
|
||||
|
||||
create_win_task_schedule.delay(task.pk)
|
||||
for field in self.policy_fields_to_copy:
|
||||
setattr(task, field, getattr(self, field))
|
||||
|
||||
task.save()
|
||||
|
||||
task.create_task_on_agent()
|
||||
|
||||
def create_task_on_agent(self):
|
||||
from agents.models import Agent
|
||||
|
||||
agent = (
|
||||
Agent.objects.filter(pk=self.agent.pk)
|
||||
.only("pk", "version", "hostname", "agent_id")
|
||||
.first()
|
||||
)
|
||||
|
||||
if self.task_type == "scheduled":
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "weekly",
|
||||
"weekdays": self.run_time_bit_weekdays,
|
||||
"pk": self.pk,
|
||||
"name": self.win_task_name,
|
||||
"hour": dt.datetime.strptime(self.run_time_minute, "%H:%M").hour,
|
||||
"min": dt.datetime.strptime(self.run_time_minute, "%H:%M").minute,
|
||||
},
|
||||
}
|
||||
|
||||
elif self.task_type == "runonce":
|
||||
# check if scheduled time is in the past
|
||||
agent_tz = pytz.timezone(agent.timezone)
|
||||
task_time_utc = self.run_time_date.replace(tzinfo=agent_tz).astimezone(
|
||||
pytz.utc
|
||||
)
|
||||
now = djangotime.now()
|
||||
if task_time_utc < now:
|
||||
self.run_time_date = now.astimezone(agent_tz).replace(
|
||||
tzinfo=pytz.utc
|
||||
) + djangotime.timedelta(minutes=5)
|
||||
self.save(update_fields=["run_time_date"])
|
||||
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "once",
|
||||
"pk": self.pk,
|
||||
"name": self.win_task_name,
|
||||
"year": int(dt.datetime.strftime(self.run_time_date, "%Y")),
|
||||
"month": dt.datetime.strftime(self.run_time_date, "%B"),
|
||||
"day": int(dt.datetime.strftime(self.run_time_date, "%d")),
|
||||
"hour": int(dt.datetime.strftime(self.run_time_date, "%H")),
|
||||
"min": int(dt.datetime.strftime(self.run_time_date, "%M")),
|
||||
},
|
||||
}
|
||||
|
||||
if self.run_asap_after_missed and pyver.parse(agent.version) >= pyver.parse(
|
||||
"1.4.7"
|
||||
):
|
||||
nats_data["schedtaskpayload"]["run_asap_after_missed"] = True
|
||||
|
||||
if self.remove_if_not_scheduled:
|
||||
nats_data["schedtaskpayload"]["deleteafter"] = True
|
||||
|
||||
elif self.task_type == "checkfailure" or self.task_type == "manual":
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "manual",
|
||||
"pk": self.pk,
|
||||
"name": self.win_task_name,
|
||||
},
|
||||
}
|
||||
else:
|
||||
return "error"
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
|
||||
|
||||
if r != "ok":
|
||||
self.sync_status = "initial"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in."
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.sync_status = "synced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.info(f"{agent.hostname} task {self.name} was successfully created")
|
||||
|
||||
return "ok"
|
||||
|
||||
def modify_task_on_agent(self):
|
||||
from agents.models import Agent
|
||||
|
||||
agent = (
|
||||
Agent.objects.filter(pk=self.agent.pk)
|
||||
.only("pk", "version", "hostname", "agent_id")
|
||||
.first()
|
||||
)
|
||||
|
||||
nats_data = {
|
||||
"func": "enableschedtask",
|
||||
"schedtaskpayload": {
|
||||
"name": self.win_task_name,
|
||||
"enabled": self.enabled,
|
||||
},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
|
||||
|
||||
if r != "ok":
|
||||
self.sync_status = "notsynced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"Unable to modify scheduled task {self.name} on {agent.hostname}. It will try again on next agent checkin"
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.sync_status = "synced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.info(f"{agent.hostname} task {self.name} was successfully modified")
|
||||
|
||||
return "ok"
|
||||
|
||||
def delete_task_on_agent(self):
|
||||
from agents.models import Agent
|
||||
|
||||
agent = (
|
||||
Agent.objects.filter(pk=self.agent.pk)
|
||||
.only("pk", "version", "hostname", "agent_id")
|
||||
.first()
|
||||
)
|
||||
|
||||
nats_data = {
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": self.win_task_name},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
|
||||
if r != "ok" and "The system cannot find the file specified" not in r:
|
||||
self.sync_status = "pendingdeletion"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"{agent.hostname} task {self.name} was successfully modified"
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.delete()
|
||||
logger.info(f"{agent.hostname} task {self.name} was deleted")
|
||||
|
||||
return "ok"
|
||||
|
||||
def run_win_task(self):
|
||||
from agents.models import Agent
|
||||
|
||||
agent = (
|
||||
Agent.objects.filter(pk=self.agent.pk)
|
||||
.only("pk", "version", "hostname", "agent_id")
|
||||
.first()
|
||||
)
|
||||
|
||||
asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False))
|
||||
return "ok"
|
||||
|
||||
def should_create_alert(self, alert_template=None):
|
||||
return (
|
||||
|
||||
@@ -4,207 +4,46 @@ import random
|
||||
from time import sleep
|
||||
from typing import Union
|
||||
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
|
||||
from logs.models import PendingAction
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
from .models import AutomatedTask
|
||||
from autotasks.models import AutomatedTask
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@app.task
|
||||
def create_win_task_schedule(pk, pending_action=False):
|
||||
def create_win_task_schedule(pk):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
if task.task_type == "scheduled":
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "weekly",
|
||||
"weekdays": task.run_time_bit_weekdays,
|
||||
"pk": task.pk,
|
||||
"name": task.win_task_name,
|
||||
"hour": dt.datetime.strptime(task.run_time_minute, "%H:%M").hour,
|
||||
"min": dt.datetime.strptime(task.run_time_minute, "%H:%M").minute,
|
||||
},
|
||||
}
|
||||
|
||||
elif task.task_type == "runonce":
|
||||
# check if scheduled time is in the past
|
||||
agent_tz = pytz.timezone(task.agent.timezone)
|
||||
task_time_utc = task.run_time_date.replace(tzinfo=agent_tz).astimezone(pytz.utc)
|
||||
now = djangotime.now()
|
||||
if task_time_utc < now:
|
||||
task.run_time_date = now.astimezone(agent_tz).replace(
|
||||
tzinfo=pytz.utc
|
||||
) + djangotime.timedelta(minutes=5)
|
||||
task.save(update_fields=["run_time_date"])
|
||||
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "once",
|
||||
"pk": task.pk,
|
||||
"name": task.win_task_name,
|
||||
"year": int(dt.datetime.strftime(task.run_time_date, "%Y")),
|
||||
"month": dt.datetime.strftime(task.run_time_date, "%B"),
|
||||
"day": int(dt.datetime.strftime(task.run_time_date, "%d")),
|
||||
"hour": int(dt.datetime.strftime(task.run_time_date, "%H")),
|
||||
"min": int(dt.datetime.strftime(task.run_time_date, "%M")),
|
||||
},
|
||||
}
|
||||
|
||||
if task.run_asap_after_missed and pyver.parse(
|
||||
task.agent.version
|
||||
) >= pyver.parse("1.4.7"):
|
||||
nats_data["schedtaskpayload"]["run_asap_after_missed"] = True
|
||||
|
||||
if task.remove_if_not_scheduled:
|
||||
nats_data["schedtaskpayload"]["deleteafter"] = True
|
||||
|
||||
elif task.task_type == "checkfailure" or task.task_type == "manual":
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "manual",
|
||||
"pk": task.pk,
|
||||
"name": task.win_task_name,
|
||||
},
|
||||
}
|
||||
else:
|
||||
return "error"
|
||||
|
||||
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
|
||||
|
||||
if r != "ok":
|
||||
# don't create pending action if this task was initiated by a pending action
|
||||
if not pending_action:
|
||||
|
||||
# complete any other pending actions on agent with same task_id
|
||||
task.agent.remove_matching_pending_task_actions(task.id)
|
||||
|
||||
PendingAction(
|
||||
agent=task.agent,
|
||||
action_type="taskaction",
|
||||
details={"action": "taskcreate", "task_id": task.id},
|
||||
).save()
|
||||
task.sync_status = "notsynced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
logger.error(
|
||||
f"Unable to create scheduled task {task.win_task_name} on {task.agent.hostname}. It will be created when the agent checks in."
|
||||
)
|
||||
return
|
||||
|
||||
# clear pending action since it was successful
|
||||
if pending_action:
|
||||
pendingaction = PendingAction.objects.get(pk=pending_action)
|
||||
pendingaction.status = "completed"
|
||||
pendingaction.save(update_fields=["status"])
|
||||
|
||||
task.sync_status = "synced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
logger.info(f"{task.agent.hostname} task {task.name} was successfully created")
|
||||
task.create_task_on_agent()
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def enable_or_disable_win_task(pk, action, pending_action=False):
|
||||
def enable_or_disable_win_task(pk):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
nats_data = {
|
||||
"func": "enableschedtask",
|
||||
"schedtaskpayload": {
|
||||
"name": task.win_task_name,
|
||||
"enabled": action,
|
||||
},
|
||||
}
|
||||
r = asyncio.run(task.agent.nats_cmd(nats_data))
|
||||
|
||||
if r != "ok":
|
||||
# don't create pending action if this task was initiated by a pending action
|
||||
if not pending_action:
|
||||
PendingAction(
|
||||
agent=task.agent,
|
||||
action_type="taskaction",
|
||||
details={
|
||||
"action": "tasktoggle",
|
||||
"value": action,
|
||||
"task_id": task.id,
|
||||
},
|
||||
).save()
|
||||
task.sync_status = "notsynced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
return
|
||||
|
||||
# clear pending action since it was successful
|
||||
if pending_action:
|
||||
pendingaction = PendingAction.objects.get(pk=pending_action)
|
||||
pendingaction.status = "completed"
|
||||
pendingaction.save(update_fields=["status"])
|
||||
|
||||
task.sync_status = "synced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
task.modify_task_on_agent()
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def delete_win_task_schedule(pk, pending_action=False):
|
||||
def delete_win_task_schedule(pk):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
nats_data = {
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": task.win_task_name},
|
||||
}
|
||||
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
|
||||
|
||||
if r != "ok" and "The system cannot find the file specified" not in r:
|
||||
# don't create pending action if this task was initiated by a pending action
|
||||
if not pending_action:
|
||||
|
||||
# complete any other pending actions on agent with same task_id
|
||||
task.agent.remove_matching_pending_task_actions(task.id)
|
||||
|
||||
PendingAction(
|
||||
agent=task.agent,
|
||||
action_type="taskaction",
|
||||
details={"action": "taskdelete", "task_id": task.id},
|
||||
).save()
|
||||
task.sync_status = "pendingdeletion"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
return "timeout"
|
||||
|
||||
# complete pending action since it was successful
|
||||
if pending_action:
|
||||
pendingaction = PendingAction.objects.get(pk=pending_action)
|
||||
pendingaction.status = "completed"
|
||||
pendingaction.save(update_fields=["status"])
|
||||
|
||||
# complete any other pending actions on agent with same task_id
|
||||
task.agent.remove_matching_pending_task_actions(task.id)
|
||||
|
||||
task.delete()
|
||||
task.delete_task_on_agent()
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def run_win_task(pk):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
|
||||
task.run_win_task()
|
||||
return "ok"
|
||||
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ from unittest.mock import call, patch
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker
|
||||
|
||||
from logs.models import PendingAction
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import AutomatedTask
|
||||
@@ -17,10 +16,10 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("automation.tasks.generate_agent_tasks_from_policies_task.delay")
|
||||
@patch("automation.tasks.generate_agent_autotasks_task.delay")
|
||||
@patch("autotasks.tasks.create_win_task_schedule.delay")
|
||||
def test_add_autotask(
|
||||
self, create_win_task_schedule, generate_agent_tasks_from_policies_task
|
||||
self, create_win_task_schedule, generate_agent_autotasks_task
|
||||
):
|
||||
url = "/tasks/automatedtasks/"
|
||||
|
||||
@@ -84,13 +83,13 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
"task_type": "manual",
|
||||
"assigned_check": None,
|
||||
},
|
||||
"policy": policy.id,
|
||||
"policy": policy.id, # type: ignore
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
generate_agent_tasks_from_policies_task.assert_called_with(policy.id)
|
||||
generate_agent_autotasks_task.assert_called_with(policy=policy.id) # type: ignore
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@@ -106,14 +105,14 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
serializer = AutoTaskSerializer(agent)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
self.assertEqual(resp.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("autotasks.tasks.enable_or_disable_win_task.delay")
|
||||
@patch("automation.tasks.update_policy_task_fields_task.delay")
|
||||
@patch("automation.tasks.update_policy_autotasks_fields_task.delay")
|
||||
def test_update_autotask(
|
||||
self, update_policy_task_fields_task, enable_or_disable_win_task
|
||||
self, update_policy_autotasks_fields_task, enable_or_disable_win_task
|
||||
):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
@@ -125,32 +124,32 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
resp = self.client.patch("/tasks/500/automatedtasks/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
url = f"/tasks/{agent_task.id}/automatedtasks/"
|
||||
url = f"/tasks/{agent_task.id}/automatedtasks/" # type: ignore
|
||||
|
||||
# test editing agent task
|
||||
data = {"enableordisable": False}
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
enable_or_disable_win_task.assert_called_with(pk=agent_task.id, action=False)
|
||||
enable_or_disable_win_task.assert_called_with(pk=agent_task.id) # type: ignore
|
||||
|
||||
url = f"/tasks/{policy_task.id}/automatedtasks/"
|
||||
url = f"/tasks/{policy_task.id}/automatedtasks/" # type: ignore
|
||||
|
||||
# test editing policy task
|
||||
data = {"enableordisable": True}
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
update_policy_task_fields_task.assert_called_with(
|
||||
policy_task.id, update_agent=True
|
||||
update_policy_autotasks_fields_task.assert_called_with(
|
||||
task=policy_task.id, update_agent=True # type: ignore
|
||||
)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@patch("autotasks.tasks.delete_win_task_schedule.delay")
|
||||
@patch("automation.tasks.delete_policy_autotask_task.delay")
|
||||
@patch("automation.tasks.delete_policy_autotasks_task.delay")
|
||||
def test_delete_autotask(
|
||||
self, delete_policy_autotask_task, delete_win_task_schedule
|
||||
self, delete_policy_autotasks_task, delete_win_task_schedule
|
||||
):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
@@ -163,21 +162,22 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test delete agent task
|
||||
url = f"/tasks/{agent_task.id}/automatedtasks/"
|
||||
url = f"/tasks/{agent_task.id}/automatedtasks/" # type: ignore
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
delete_win_task_schedule.assert_called_with(pk=agent_task.id)
|
||||
delete_win_task_schedule.assert_called_with(pk=agent_task.id) # type: ignore
|
||||
|
||||
# test delete policy task
|
||||
url = f"/tasks/{policy_task.id}/automatedtasks/"
|
||||
url = f"/tasks/{policy_task.id}/automatedtasks/" # type: ignore
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
delete_policy_autotask_task.assert_called_with(policy_task.id)
|
||||
self.assertFalse(AutomatedTask.objects.filter(pk=policy_task.id)) # type: ignore
|
||||
delete_policy_autotasks_task.assert_called_with(task=policy_task.id) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_run_autotask(self, nats_cmd):
|
||||
@patch("autotasks.tasks.run_win_task.delay")
|
||||
def test_run_autotask(self, run_win_task):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent", version="1.1.0")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
@@ -187,11 +187,10 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test run agent task
|
||||
url = f"/tasks/runwintask/{task.id}/"
|
||||
url = f"/tasks/runwintask/{task.id}/" # type: ignore
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
nats_cmd.assert_called_with({"func": "runtask", "taskpk": task.id}, wait=False)
|
||||
nats_cmd.reset_mock()
|
||||
run_win_task.assert_called()
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -284,9 +283,9 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
run_time_bit_weekdays=127,
|
||||
run_time_minute="21:55",
|
||||
)
|
||||
self.assertEqual(self.task1.sync_status, "notsynced")
|
||||
self.assertEqual(self.task1.sync_status, "initial")
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk).apply()
|
||||
self.assertEqual(nats_cmd.call_count, 1)
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
@@ -301,29 +300,16 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"min": 55,
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
timeout=5,
|
||||
)
|
||||
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
|
||||
self.assertEqual(self.task1.sync_status, "synced")
|
||||
|
||||
nats_cmd.return_value = "timeout"
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk).apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
|
||||
self.assertEqual(self.task1.sync_status, "notsynced")
|
||||
|
||||
# test pending action
|
||||
self.pending_action = PendingAction.objects.create(
|
||||
agent=self.agent, action_type="taskaction"
|
||||
)
|
||||
self.assertEqual(self.pending_action.status, "pending")
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(
|
||||
pk=self.task1.pk, pending_action=self.pending_action.pk
|
||||
).apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.pending_action = PendingAction.objects.get(pk=self.pending_action.pk)
|
||||
self.assertEqual(self.pending_action.status, "completed")
|
||||
self.assertEqual(self.task1.sync_status, "initial")
|
||||
|
||||
# test runonce with future date
|
||||
nats_cmd.reset_mock()
|
||||
@@ -337,7 +323,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
run_time_date=run_time_date,
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task2.pk, pending_action=False).apply()
|
||||
ret = create_win_task_schedule.s(pk=self.task2.pk).apply()
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
@@ -353,7 +339,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"min": int(dt.datetime.strftime(self.task2.run_time_date, "%M")),
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
timeout=5,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@@ -369,7 +355,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
run_time_date=run_time_date,
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task3.pk, pending_action=False).apply()
|
||||
ret = create_win_task_schedule.s(pk=self.task3.pk).apply()
|
||||
self.task3 = AutomatedTask.objects.get(pk=self.task3.pk)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@@ -385,7 +371,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
assigned_check=self.check,
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task4.pk, pending_action=False).apply()
|
||||
ret = create_win_task_schedule.s(pk=self.task4.pk).apply()
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
@@ -396,7 +382,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"name": task_name,
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
timeout=5,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@@ -410,7 +396,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
task_type="manual",
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task5.pk, pending_action=False).apply()
|
||||
ret = create_win_task_schedule.s(pk=self.task5.pk).apply()
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
@@ -421,6 +407,6 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"name": task_name,
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
timeout=5,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@@ -1,28 +1,22 @@
|
||||
import asyncio
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.utils import get_bit_days, get_default_timezone, notify_error
|
||||
|
||||
from .models import AutomatedTask
|
||||
from .serializers import AutoTaskSerializer, TaskSerializer
|
||||
from .tasks import (
|
||||
create_win_task_schedule,
|
||||
delete_win_task_schedule,
|
||||
enable_or_disable_win_task,
|
||||
)
|
||||
|
||||
|
||||
class AddAutoTask(APIView):
|
||||
def post(self, request):
|
||||
from automation.models import Policy
|
||||
from automation.tasks import generate_agent_tasks_from_policies_task
|
||||
from automation.tasks import generate_agent_autotasks_task
|
||||
|
||||
from autotasks.tasks import create_win_task_schedule
|
||||
|
||||
data = request.data
|
||||
script = get_object_or_404(Script, pk=data["autotask"]["script"])
|
||||
@@ -47,7 +41,7 @@ class AddAutoTask(APIView):
|
||||
del data["autotask"]["run_time_days"]
|
||||
serializer = TaskSerializer(data=data["autotask"], partial=True, context=parent)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save(
|
||||
task = serializer.save(
|
||||
**parent,
|
||||
script=script,
|
||||
win_task_name=AutomatedTask.generate_task_name(),
|
||||
@@ -55,11 +49,11 @@ class AddAutoTask(APIView):
|
||||
run_time_bit_weekdays=bit_weekdays,
|
||||
)
|
||||
|
||||
if not "policy" in data:
|
||||
create_win_task_schedule.delay(pk=obj.pk)
|
||||
if task.agent:
|
||||
create_win_task_schedule.delay(pk=task.pk)
|
||||
|
||||
if "policy" in data:
|
||||
generate_agent_tasks_from_policies_task.delay(data["policy"])
|
||||
elif task.policy:
|
||||
generate_agent_autotasks_task.delay(policy=task.policy.pk)
|
||||
|
||||
return Response("Task will be created shortly!")
|
||||
|
||||
@@ -75,7 +69,7 @@ class AutoTask(APIView):
|
||||
return Response(AutoTaskSerializer(agent, context=ctx).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
from automation.tasks import update_policy_task_fields_task
|
||||
from automation.tasks import update_policy_autotasks_fields_task
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
@@ -84,39 +78,44 @@ class AutoTask(APIView):
|
||||
serializer.save()
|
||||
|
||||
if task.policy:
|
||||
update_policy_task_fields_task.delay(task.pk)
|
||||
update_policy_autotasks_fields_task.delay(task=task.pk)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def patch(self, request, pk):
|
||||
from automation.tasks import update_policy_task_fields_task
|
||||
from automation.tasks import update_policy_autotasks_fields_task
|
||||
from autotasks.tasks import enable_or_disable_win_task
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
if "enableordisable" in request.data:
|
||||
action = request.data["enableordisable"]
|
||||
|
||||
if not task.policy:
|
||||
enable_or_disable_win_task.delay(pk=task.pk, action=action)
|
||||
|
||||
else:
|
||||
update_policy_task_fields_task.delay(task.pk, update_agent=True)
|
||||
|
||||
task.enabled = action
|
||||
task.save(update_fields=["enabled"])
|
||||
action = "enabled" if action else "disabled"
|
||||
|
||||
if task.policy:
|
||||
update_policy_autotasks_fields_task.delay(
|
||||
task=task.pk, update_agent=True
|
||||
)
|
||||
elif task.agent:
|
||||
enable_or_disable_win_task.delay(pk=task.pk)
|
||||
|
||||
return Response(f"Task will be {action} shortly")
|
||||
|
||||
else:
|
||||
return notify_error("The request was invalid")
|
||||
|
||||
def delete(self, request, pk):
|
||||
from automation.tasks import delete_policy_autotask_task
|
||||
from automation.tasks import delete_policy_autotasks_task
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
if not task.policy:
|
||||
if task.agent:
|
||||
delete_win_task_schedule.delay(pk=task.pk)
|
||||
|
||||
if task.policy:
|
||||
delete_policy_autotask_task.delay(task.pk)
|
||||
elif task.policy:
|
||||
delete_policy_autotasks_task.delay(task=task.pk)
|
||||
task.delete()
|
||||
|
||||
return Response(f"{task.name} will be deleted shortly")
|
||||
@@ -124,6 +123,8 @@ class AutoTask(APIView):
|
||||
|
||||
@api_view()
|
||||
def run_task(request, pk):
|
||||
from autotasks.tasks import run_win_task
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
|
||||
run_win_task.delay(pk=pk)
|
||||
return Response(f"{task.name} will now be run on {task.agent.hostname}")
|
||||
|
||||
@@ -4,17 +4,17 @@ import os
|
||||
import string
|
||||
from statistics import mean
|
||||
from typing import Any
|
||||
from packaging import version as pyver
|
||||
|
||||
import pytz
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from core.models import CoreSettings
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
from django.db import models
|
||||
from loguru import logger
|
||||
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from core.models import CoreSettings
|
||||
from logs.models import BaseAuditModel
|
||||
from loguru import logger
|
||||
|
||||
from .utils import bytes2human
|
||||
|
||||
@@ -263,6 +263,42 @@ class Check(BaseAuditModel):
|
||||
"modified_time",
|
||||
]
|
||||
|
||||
@property
|
||||
def policy_fields_to_copy(self) -> list[str]:
|
||||
return [
|
||||
"warning_threshold",
|
||||
"error_threshold",
|
||||
"alert_severity",
|
||||
"name",
|
||||
"run_interval",
|
||||
"disk",
|
||||
"fails_b4_alert",
|
||||
"ip",
|
||||
"script",
|
||||
"script_args",
|
||||
"info_return_codes",
|
||||
"warning_return_codes",
|
||||
"timeout",
|
||||
"svc_name",
|
||||
"svc_display_name",
|
||||
"svc_policy_mode",
|
||||
"pass_if_start_pending",
|
||||
"pass_if_svc_not_exist",
|
||||
"restart_if_stopped",
|
||||
"log_name",
|
||||
"event_id",
|
||||
"event_id_is_wildcard",
|
||||
"event_type",
|
||||
"event_source",
|
||||
"event_message",
|
||||
"fail_when",
|
||||
"search_last_days",
|
||||
"number_of_events_b4_alert",
|
||||
"email_alert",
|
||||
"text_alert",
|
||||
"dashboard_alert",
|
||||
]
|
||||
|
||||
def should_create_alert(self, alert_template=None):
|
||||
|
||||
return (
|
||||
@@ -386,16 +422,20 @@ class Check(BaseAuditModel):
|
||||
|
||||
# ping checks
|
||||
elif self.check_type == "ping":
|
||||
success = ["Reply", "bytes", "time", "TTL"]
|
||||
output = data["output"]
|
||||
|
||||
if data["has_stdout"]:
|
||||
if all(x in output for x in success):
|
||||
self.status = "passing"
|
||||
else:
|
||||
if pyver.parse(self.agent.version) <= pyver.parse("1.5.2"):
|
||||
# DEPRECATED
|
||||
success = ["Reply", "bytes", "time", "TTL"]
|
||||
if data["has_stdout"]:
|
||||
if all(x in output for x in success):
|
||||
self.status = "passing"
|
||||
else:
|
||||
self.status = "failing"
|
||||
elif data["has_stderr"]:
|
||||
self.status = "failing"
|
||||
elif data["has_stderr"]:
|
||||
self.status = "failing"
|
||||
else:
|
||||
self.status = data["status"]
|
||||
|
||||
self.more_info = output
|
||||
self.save(update_fields=["more_info"])
|
||||
@@ -551,49 +591,23 @@ class Check(BaseAuditModel):
|
||||
|
||||
def create_policy_check(self, agent=None, policy=None):
|
||||
|
||||
if not agent and not policy or agent and policy:
|
||||
if (not agent and not policy) or (agent and policy):
|
||||
return
|
||||
|
||||
Check.objects.create(
|
||||
check = Check.objects.create(
|
||||
agent=agent,
|
||||
policy=policy,
|
||||
managed_by_policy=bool(agent),
|
||||
parent_check=(self.pk if agent else None),
|
||||
name=self.name,
|
||||
alert_severity=self.alert_severity,
|
||||
check_type=self.check_type,
|
||||
email_alert=self.email_alert,
|
||||
dashboard_alert=self.dashboard_alert,
|
||||
text_alert=self.text_alert,
|
||||
fails_b4_alert=self.fails_b4_alert,
|
||||
extra_details=self.extra_details,
|
||||
run_interval=self.run_interval,
|
||||
error_threshold=self.error_threshold,
|
||||
warning_threshold=self.warning_threshold,
|
||||
disk=self.disk,
|
||||
ip=self.ip,
|
||||
script=self.script,
|
||||
script_args=self.script_args,
|
||||
timeout=self.timeout,
|
||||
info_return_codes=self.info_return_codes,
|
||||
warning_return_codes=self.warning_return_codes,
|
||||
svc_name=self.svc_name,
|
||||
svc_display_name=self.svc_display_name,
|
||||
pass_if_start_pending=self.pass_if_start_pending,
|
||||
pass_if_svc_not_exist=self.pass_if_svc_not_exist,
|
||||
restart_if_stopped=self.restart_if_stopped,
|
||||
svc_policy_mode=self.svc_policy_mode,
|
||||
log_name=self.log_name,
|
||||
event_id=self.event_id,
|
||||
event_id_is_wildcard=self.event_id_is_wildcard,
|
||||
event_type=self.event_type,
|
||||
event_source=self.event_source,
|
||||
event_message=self.event_message,
|
||||
fail_when=self.fail_when,
|
||||
search_last_days=self.search_last_days,
|
||||
number_of_events_b4_alert=self.number_of_events_b4_alert,
|
||||
)
|
||||
|
||||
for field in self.policy_fields_to_copy:
|
||||
setattr(check, field, getattr(self, field))
|
||||
|
||||
check.save()
|
||||
|
||||
def is_duplicate(self, check):
|
||||
if self.check_type == "diskspace":
|
||||
return self.disk == check.disk
|
||||
@@ -633,12 +647,15 @@ class Check(BaseAuditModel):
|
||||
if self.error_threshold:
|
||||
text += f" Error Threshold: {self.error_threshold}%"
|
||||
|
||||
percent_used = [
|
||||
d["percent"] for d in self.agent.disks if d["device"] == self.disk
|
||||
][0]
|
||||
percent_free = 100 - percent_used
|
||||
try:
|
||||
percent_used = [
|
||||
d["percent"] for d in self.agent.disks if d["device"] == self.disk
|
||||
][0]
|
||||
percent_free = 100 - percent_used
|
||||
|
||||
body = subject + f" - Free: {percent_free}%, {text}"
|
||||
body = subject + f" - Free: {percent_free}%, {text}"
|
||||
except:
|
||||
body = subject + f" - Disk {self.disk} does not exist"
|
||||
|
||||
elif self.check_type == "script":
|
||||
|
||||
@@ -667,16 +684,7 @@ class Check(BaseAuditModel):
|
||||
body = subject + f" - Average memory usage: {avg}%, {text}"
|
||||
|
||||
elif self.check_type == "winsvc":
|
||||
|
||||
try:
|
||||
status = list(
|
||||
filter(lambda x: x["name"] == self.svc_name, self.agent.services)
|
||||
)[0]["status"]
|
||||
# catch services that don't exist if policy check
|
||||
except:
|
||||
status = "Unknown"
|
||||
|
||||
body = subject + f" - Status: {status.upper()}"
|
||||
body = subject + f" - Status: {self.more_info}"
|
||||
|
||||
elif self.check_type == "eventlog":
|
||||
|
||||
@@ -719,11 +727,15 @@ class Check(BaseAuditModel):
|
||||
if self.error_threshold:
|
||||
text += f" Error Threshold: {self.error_threshold}%"
|
||||
|
||||
percent_used = [
|
||||
d["percent"] for d in self.agent.disks if d["device"] == self.disk
|
||||
][0]
|
||||
percent_free = 100 - percent_used
|
||||
body = subject + f" - Free: {percent_free}%, {text}"
|
||||
try:
|
||||
percent_used = [
|
||||
d["percent"] for d in self.agent.disks if d["device"] == self.disk
|
||||
][0]
|
||||
percent_free = 100 - percent_used
|
||||
body = subject + f" - Free: {percent_free}%, {text}"
|
||||
except:
|
||||
body = subject + f" - Disk {self.disk} does not exist"
|
||||
|
||||
elif self.check_type == "script":
|
||||
body = subject + f" - Return code: {self.retcode}"
|
||||
elif self.check_type == "ping":
|
||||
@@ -741,10 +753,7 @@ class Check(BaseAuditModel):
|
||||
elif self.check_type == "memory":
|
||||
body = subject + f" - Average memory usage: {avg}%, {text}"
|
||||
elif self.check_type == "winsvc":
|
||||
status = list(
|
||||
filter(lambda x: x["name"] == self.svc_name, self.agent.services)
|
||||
)[0]["status"]
|
||||
body = subject + f" - Status: {status.upper()}"
|
||||
body = subject + f" - Status: {self.more_info}"
|
||||
elif self.check_type == "eventlog":
|
||||
body = subject
|
||||
|
||||
|
||||
@@ -14,6 +14,22 @@ class TestCheckViews(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_delete_agent_check(self):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
|
||||
resp = self.client.delete("/checks/500/check/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
url = f"/checks/{check.pk}/check/"
|
||||
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertFalse(agent.agentchecks.all())
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_get_disk_check(self):
|
||||
# setup data
|
||||
disk_check = baker.make_recipe("checks.diskspace_check")
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import asyncio
|
||||
from datetime import datetime as dt
|
||||
|
||||
from agents.models import Agent
|
||||
from automation.models import Policy
|
||||
from django.db.models import Q
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
@@ -8,14 +10,6 @@ from packaging import version as pyver
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from automation.models import Policy
|
||||
from automation.tasks import (
|
||||
delete_policy_check_task,
|
||||
generate_agent_checks_from_policies_task,
|
||||
update_policy_check_fields_task,
|
||||
)
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
@@ -25,6 +19,8 @@ from .serializers import CheckHistorySerializer, CheckSerializer
|
||||
|
||||
class AddCheck(APIView):
|
||||
def post(self, request):
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
policy = None
|
||||
agent = None
|
||||
|
||||
@@ -53,28 +49,30 @@ class AddCheck(APIView):
|
||||
data=request.data["check"], partial=True, context=parent
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save(**parent, script=script)
|
||||
new_check = serializer.save(**parent, script=script)
|
||||
|
||||
# Generate policy Checks
|
||||
if policy:
|
||||
generate_agent_checks_from_policies_task.delay(policypk=policy.pk)
|
||||
generate_agent_checks_task.delay(policy=policy.pk)
|
||||
elif agent:
|
||||
checks = agent.agentchecks.filter( # type: ignore
|
||||
check_type=obj.check_type, managed_by_policy=True
|
||||
check_type=new_check.check_type, managed_by_policy=True
|
||||
)
|
||||
|
||||
# Should only be one
|
||||
duplicate_check = [check for check in checks if check.is_duplicate(obj)]
|
||||
duplicate_check = [
|
||||
check for check in checks if check.is_duplicate(new_check)
|
||||
]
|
||||
|
||||
if duplicate_check:
|
||||
policy = Check.objects.get(pk=duplicate_check[0].parent_check).policy
|
||||
if policy.enforced:
|
||||
obj.overriden_by_policy = True
|
||||
obj.save()
|
||||
new_check.overriden_by_policy = True
|
||||
new_check.save()
|
||||
else:
|
||||
duplicate_check[0].delete()
|
||||
|
||||
return Response(f"{obj.readable_desc} was added!")
|
||||
return Response(f"{new_check.readable_desc} was added!")
|
||||
|
||||
|
||||
class GetUpdateDeleteCheck(APIView):
|
||||
@@ -83,10 +81,17 @@ class GetUpdateDeleteCheck(APIView):
|
||||
return Response(CheckSerializer(check).data)
|
||||
|
||||
def patch(self, request, pk):
|
||||
from automation.tasks import (
|
||||
update_policy_check_fields_task,
|
||||
)
|
||||
|
||||
check = get_object_or_404(Check, pk=pk)
|
||||
|
||||
# remove fields that should not be changed when editing a check from the frontend
|
||||
if "check_alert" not in request.data.keys():
|
||||
if (
|
||||
"check_alert" not in request.data.keys()
|
||||
and "check_reset" not in request.data.keys()
|
||||
):
|
||||
[request.data.pop(i) for i in check.non_editable_fields]
|
||||
|
||||
# set event id to 0 if wildcard because it needs to be an integer field for db
|
||||
@@ -102,31 +107,32 @@ class GetUpdateDeleteCheck(APIView):
|
||||
|
||||
serializer = CheckSerializer(instance=check, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
check = serializer.save()
|
||||
|
||||
# resolve any alerts that are open
|
||||
if "check_reset" in request.data.keys():
|
||||
if check.alert.filter(resolved=False).exists():
|
||||
check.alert.get(resolved=False).resolve()
|
||||
|
||||
# Update policy check fields
|
||||
if check.policy:
|
||||
update_policy_check_fields_task(checkpk=pk)
|
||||
update_policy_check_fields_task.delay(check=check.pk)
|
||||
|
||||
return Response(f"{obj.readable_desc} was edited!")
|
||||
return Response(f"{check.readable_desc} was edited!")
|
||||
|
||||
def delete(self, request, pk):
|
||||
check = get_object_or_404(Check, pk=pk)
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
check_pk = check.pk
|
||||
policy_pk = None
|
||||
if check.policy:
|
||||
policy_pk = check.policy.pk
|
||||
check = get_object_or_404(Check, pk=pk)
|
||||
|
||||
check.delete()
|
||||
|
||||
# Policy check deleted
|
||||
if check.policy:
|
||||
delete_policy_check_task.delay(checkpk=check_pk)
|
||||
Check.objects.filter(managed_by_policy=True, parent_check=pk).delete()
|
||||
|
||||
# Re-evaluate agent checks is policy was enforced
|
||||
if check.policy.enforced:
|
||||
generate_agent_checks_from_policies_task.delay(policypk=policy_pk)
|
||||
generate_agent_checks_task.delay(policy=check.policy)
|
||||
|
||||
# Agent check deleted
|
||||
elif check.agent:
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-17 01:25
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0016_auto_20210329_1827'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='client',
|
||||
name='block_policy_inheritance',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='site',
|
||||
name='block_policy_inheritance',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -9,6 +9,7 @@ from logs.models import BaseAuditModel
|
||||
|
||||
class Client(BaseAuditModel):
|
||||
name = models.CharField(max_length=255, unique=True)
|
||||
block_policy_inheritance = models.BooleanField(default=False)
|
||||
workstation_policy = models.ForeignKey(
|
||||
"automation.Policy",
|
||||
related_name="workstation_clients",
|
||||
@@ -34,30 +35,29 @@ class Client(BaseAuditModel):
|
||||
|
||||
def save(self, *args, **kw):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_agent_checks_by_location_task
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# get old client if exists
|
||||
old_client = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kw)
|
||||
|
||||
# check if server polcies have changed and initiate task to reapply policies if so
|
||||
if old_client and old_client.server_policy != self.server_policy:
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site__client_id": self.pk},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
# check if polcies have changed and initiate task to reapply policies if so
|
||||
if old_client:
|
||||
if (
|
||||
(old_client.server_policy != self.server_policy)
|
||||
or (old_client.workstation_policy != self.workstation_policy)
|
||||
or (
|
||||
old_client.block_policy_inheritance != self.block_policy_inheritance
|
||||
)
|
||||
):
|
||||
|
||||
# check if workstation polcies have changed and initiate task to reapply policies if so
|
||||
if old_client and old_client.workstation_policy != self.workstation_policy:
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site__client_id": self.pk},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_task.delay(
|
||||
client=self.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
if old_client and old_client.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
if old_client.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
|
||||
class Meta:
|
||||
ordering = ("name",)
|
||||
@@ -65,6 +65,10 @@ class Client(BaseAuditModel):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def agent_count(self) -> int:
|
||||
return Agent.objects.filter(site__client=self).count()
|
||||
|
||||
@property
|
||||
def has_maintenanace_mode_agents(self):
|
||||
return (
|
||||
@@ -86,16 +90,24 @@ class Client(BaseAuditModel):
|
||||
.prefetch_related("agentchecks")
|
||||
)
|
||||
|
||||
failing = 0
|
||||
data = {"error": False, "warning": False}
|
||||
|
||||
for agent in agents:
|
||||
if agent.checks["has_failing_checks"]:
|
||||
failing += 1
|
||||
|
||||
if agent.checks["warning"]:
|
||||
data["warning"] = True
|
||||
|
||||
if agent.checks["failing"]:
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
failing += 1
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
return failing > 0
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def serialize(client):
|
||||
@@ -108,6 +120,7 @@ class Client(BaseAuditModel):
|
||||
class Site(BaseAuditModel):
|
||||
client = models.ForeignKey(Client, related_name="sites", on_delete=models.CASCADE)
|
||||
name = models.CharField(max_length=255)
|
||||
block_policy_inheritance = models.BooleanField(default=False)
|
||||
workstation_policy = models.ForeignKey(
|
||||
"automation.Policy",
|
||||
related_name="workstation_sites",
|
||||
@@ -133,30 +146,24 @@ class Site(BaseAuditModel):
|
||||
|
||||
def save(self, *args, **kw):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_agent_checks_by_location_task
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# get old client if exists
|
||||
old_site = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(Site, self).save(*args, **kw)
|
||||
|
||||
# check if server polcies have changed and initiate task to reapply policies if so
|
||||
if old_site and old_site.server_policy != self.server_policy:
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site_id": self.pk},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
# check if polcies have changed and initiate task to reapply policies if so
|
||||
if old_site:
|
||||
if (
|
||||
(old_site.server_policy != self.server_policy)
|
||||
or (old_site.workstation_policy != self.workstation_policy)
|
||||
or (old_site.block_policy_inheritance != self.block_policy_inheritance)
|
||||
):
|
||||
|
||||
# check if workstation polcies have changed and initiate task to reapply policies if so
|
||||
if old_site and old_site.workstation_policy != self.workstation_policy:
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site_id": self.pk},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_task.delay(site=self.pk, create_tasks=True)
|
||||
|
||||
if old_site and old_site.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
if old_site.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
|
||||
class Meta:
|
||||
ordering = ("name",)
|
||||
@@ -165,6 +172,10 @@ class Site(BaseAuditModel):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def agent_count(self) -> int:
|
||||
return Agent.objects.filter(site=self).count()
|
||||
|
||||
@property
|
||||
def has_maintenanace_mode_agents(self):
|
||||
return Agent.objects.filter(site=self, maintenance_mode=True).count() > 0
|
||||
@@ -184,16 +195,24 @@ class Site(BaseAuditModel):
|
||||
.prefetch_related("agentchecks")
|
||||
)
|
||||
|
||||
failing = 0
|
||||
data = {"error": False, "warning": False}
|
||||
|
||||
for agent in agents:
|
||||
|
||||
if agent.checks["has_failing_checks"]:
|
||||
failing += 1
|
||||
if agent.checks["warning"]:
|
||||
data["warning"] = True
|
||||
|
||||
if agent.checks["failing"]:
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
failing += 1
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
return failing > 0
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def serialize(site):
|
||||
|
||||
@@ -25,6 +25,7 @@ class SiteCustomFieldSerializer(ModelSerializer):
|
||||
class SiteSerializer(ModelSerializer):
|
||||
client_name = ReadOnlyField(source="client.name")
|
||||
custom_fields = SiteCustomFieldSerializer(many=True, read_only=True)
|
||||
agent_count = ReadOnlyField()
|
||||
|
||||
class Meta:
|
||||
model = Site
|
||||
@@ -37,6 +38,8 @@ class SiteSerializer(ModelSerializer):
|
||||
"client_name",
|
||||
"client",
|
||||
"custom_fields",
|
||||
"agent_count",
|
||||
"block_policy_inheritance",
|
||||
)
|
||||
|
||||
def validate(self, val):
|
||||
@@ -68,6 +71,7 @@ class ClientCustomFieldSerializer(ModelSerializer):
|
||||
class ClientSerializer(ModelSerializer):
|
||||
sites = SiteSerializer(many=True, read_only=True)
|
||||
custom_fields = ClientCustomFieldSerializer(many=True, read_only=True)
|
||||
agent_count = ReadOnlyField()
|
||||
|
||||
class Meta:
|
||||
model = Client
|
||||
@@ -77,8 +81,10 @@ class ClientSerializer(ModelSerializer):
|
||||
"server_policy",
|
||||
"workstation_policy",
|
||||
"alert_template",
|
||||
"block_policy_inheritance",
|
||||
"sites",
|
||||
"custom_fields",
|
||||
"agent_count",
|
||||
)
|
||||
|
||||
def validate(self, val):
|
||||
@@ -95,7 +101,6 @@ class SiteTreeSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Site
|
||||
fields = "__all__"
|
||||
ordering = ("failing_checks",)
|
||||
|
||||
|
||||
class ClientTreeSerializer(ModelSerializer):
|
||||
@@ -106,7 +111,6 @@ class ClientTreeSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Client
|
||||
fields = "__all__"
|
||||
ordering = ("failing_checks",)
|
||||
|
||||
|
||||
class DeploymentSerializer(ModelSerializer):
|
||||
|
||||
@@ -179,13 +179,9 @@ class TestClientViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||
def test_delete_client(self, task1, task2):
|
||||
def test_delete_client(self):
|
||||
from agents.models import Agent
|
||||
|
||||
task1.return_value = "ok"
|
||||
task2.return_value = "ok"
|
||||
# setup data
|
||||
client_to_delete = baker.make("clients.Client")
|
||||
client_to_move = baker.make("clients.Client")
|
||||
@@ -352,13 +348,9 @@ class TestClientViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||
def test_delete_site(self, task1, task2):
|
||||
def test_delete_site(self):
|
||||
from agents.models import Agent
|
||||
|
||||
task1.return_value = "ok"
|
||||
task2.return_value = "ok"
|
||||
# setup data
|
||||
client = baker.make("clients.Client")
|
||||
site_to_delete = baker.make("clients.Site", client=client)
|
||||
|
||||
@@ -111,7 +111,7 @@ class GetUpdateClient(APIView):
|
||||
|
||||
class DeleteClient(APIView):
|
||||
def delete(self, request, pk, sitepk):
|
||||
from automation.tasks import generate_all_agent_checks_task
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
agents = Agent.objects.filter(site__client=client)
|
||||
@@ -124,8 +124,7 @@ class DeleteClient(APIView):
|
||||
site = get_object_or_404(Site, pk=sitepk)
|
||||
agents.update(site=site)
|
||||
|
||||
generate_all_agent_checks_task.delay("workstation", create_tasks=True)
|
||||
generate_all_agent_checks_task.delay("server", create_tasks=True)
|
||||
generate_agent_checks_task.delay(all=True, create_tasks=True)
|
||||
|
||||
client.delete()
|
||||
return Response(f"{client.name} was deleted!")
|
||||
@@ -207,7 +206,7 @@ class GetUpdateSite(APIView):
|
||||
|
||||
class DeleteSite(APIView):
|
||||
def delete(self, request, pk, sitepk):
|
||||
from automation.tasks import generate_all_agent_checks_task
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
if site.client.sites.count() == 1:
|
||||
@@ -224,8 +223,7 @@ class DeleteSite(APIView):
|
||||
|
||||
agents.update(site=agent_site)
|
||||
|
||||
generate_all_agent_checks_task.delay("workstation", create_tasks=True)
|
||||
generate_all_agent_checks_task.delay("server", create_tasks=True)
|
||||
generate_agent_checks_task.delay(all=True, create_tasks=True)
|
||||
|
||||
site.delete()
|
||||
return Response(f"{site.name} was deleted!")
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import CoreSettings, CustomField
|
||||
from .models import CodeSignToken, CoreSettings, CustomField
|
||||
|
||||
admin.site.register(CoreSettings)
|
||||
admin.site.register(CustomField)
|
||||
admin.site.register(CodeSignToken)
|
||||
|
||||
@@ -9,6 +9,9 @@ $rdp = rdpchange
|
||||
$ping = pingchange
|
||||
$auth = '"tokenchange"'
|
||||
$downloadlink = 'downloadchange'
|
||||
$apilink = $downloadlink.split('/')
|
||||
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
|
||||
|
||||
$serviceName = 'tacticalagent'
|
||||
If (Get-Service $serviceName -ErrorAction SilentlyContinue) {
|
||||
@@ -45,24 +48,35 @@ If (Get-Service $serviceName -ErrorAction SilentlyContinue) {
|
||||
# pass
|
||||
}
|
||||
|
||||
Try
|
||||
{
|
||||
Invoke-WebRequest -Uri $downloadlink -OutFile $OutPath\$output
|
||||
Start-Process -FilePath $OutPath\$output -ArgumentList ('/VERYSILENT /SUPPRESSMSGBOXES') -Wait
|
||||
write-host ('Extracting...')
|
||||
Start-Sleep -s 5
|
||||
Start-Process -FilePath "C:\Program Files\TacticalAgent\tacticalrmm.exe" -ArgumentList $installArgs -Wait
|
||||
exit 0
|
||||
}
|
||||
Catch
|
||||
{
|
||||
$ErrorMessage = $_.Exception.Message
|
||||
$FailedItem = $_.Exception.ItemName
|
||||
Write-Error -Message "$ErrorMessage $FailedItem"
|
||||
exit 1
|
||||
}
|
||||
Finally
|
||||
{
|
||||
Remove-Item -Path $OutPath\$output
|
||||
$X = 0
|
||||
do {
|
||||
Write-Output "Waiting for network"
|
||||
Start-Sleep -s 5
|
||||
$X += 1
|
||||
} until(($connectreult = Test-NetConnection $apilink[2] -Port 443 | ? { $_.TcpTestSucceeded }) -or $X -eq 3)
|
||||
|
||||
if ($connectreult.TcpTestSucceeded -eq $true){
|
||||
Try
|
||||
{
|
||||
Invoke-WebRequest -Uri $downloadlink -OutFile $OutPath\$output
|
||||
Start-Process -FilePath $OutPath\$output -ArgumentList ('/VERYSILENT /SUPPRESSMSGBOXES') -Wait
|
||||
write-host ('Extracting...')
|
||||
Start-Sleep -s 5
|
||||
Start-Process -FilePath "C:\Program Files\TacticalAgent\tacticalrmm.exe" -ArgumentList $installArgs -Wait
|
||||
exit 0
|
||||
}
|
||||
Catch
|
||||
{
|
||||
$ErrorMessage = $_.Exception.Message
|
||||
$FailedItem = $_.Exception.ItemName
|
||||
Write-Error -Message "$ErrorMessage $FailedItem"
|
||||
exit 1
|
||||
}
|
||||
Finally
|
||||
{
|
||||
Remove-Item -Path $OutPath\$output
|
||||
}
|
||||
} else {
|
||||
Write-Output "Unable to connect to server"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,12 +1,8 @@
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from agents.models import Agent
|
||||
from scripts.models import Script
|
||||
from logs.models import PendingAction
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
@@ -29,5 +25,8 @@ class Command(BaseCommand):
|
||||
self.style.SUCCESS(f"Migrated disks on {agent.hostname}")
|
||||
)
|
||||
|
||||
# remove task pending actions. deprecated 4/20/2021
|
||||
PendingAction.objects.filter(action_type="taskaction").delete()
|
||||
|
||||
# load community scripts into the db
|
||||
Script.load_community_scripts()
|
||||
|
||||
20
api/tacticalrmm/core/migrations/0019_codesigntoken.py
Normal file
@@ -0,0 +1,20 @@
|
||||
# Generated by Django 3.2 on 2021-04-13 05:41
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0018_auto_20210329_1709'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='CodeSignToken',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('token', models.CharField(blank=True, max_length=255, null=True)),
|
||||
],
|
||||
),
|
||||
]
|
||||
21
api/tacticalrmm/core/migrations/0019_globalkvstore.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-04 00:32
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0018_auto_20210329_1709'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='GlobalKVStore',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(max_length=25)),
|
||||
('value', models.TextField()),
|
||||
],
|
||||
),
|
||||
]
|
||||
14
api/tacticalrmm/core/migrations/0020_merge_20210415_0132.py
Normal file
@@ -0,0 +1,14 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-15 01:32
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0019_codesigntoken'),
|
||||
('core', '0019_globalkvstore'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-24 23:56
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0020_merge_20210415_0132'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='customfield',
|
||||
name='hide_in_ui',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -79,7 +79,7 @@ class CoreSettings(BaseAuditModel):
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_all_agent_checks_task
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
if not self.pk and CoreSettings.objects.exists():
|
||||
raise ValidationError("There can only be one CoreSettings instance")
|
||||
@@ -97,14 +97,10 @@ class CoreSettings(BaseAuditModel):
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
|
||||
# check if server polcies have changed and initiate task to reapply policies if so
|
||||
if old_settings and old_settings.server_policy != self.server_policy:
|
||||
generate_all_agent_checks_task.delay(mon_type="server", create_tasks=True)
|
||||
|
||||
# check if workstation polcies have changed and initiate task to reapply policies if so
|
||||
if old_settings and old_settings.workstation_policy != self.workstation_policy:
|
||||
generate_all_agent_checks_task.delay(
|
||||
mon_type="workstation", create_tasks=True
|
||||
)
|
||||
if (old_settings and old_settings.server_policy != self.server_policy) or (
|
||||
old_settings and old_settings.workstation_policy != self.workstation_policy
|
||||
):
|
||||
generate_agent_checks_task.delay(all=True, create_tasks=True)
|
||||
|
||||
if old_settings and old_settings.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
@@ -251,6 +247,7 @@ class CustomField(models.Model):
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
hide_in_ui = models.BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
unique_together = (("model", "name"),)
|
||||
@@ -266,3 +263,69 @@ class CustomField(models.Model):
|
||||
return self.default_value_bool
|
||||
else:
|
||||
return self.default_value_string
|
||||
|
||||
|
||||
class CodeSignToken(models.Model):
|
||||
token = models.CharField(max_length=255, null=True, blank=True)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.pk and CodeSignToken.objects.exists():
|
||||
raise ValidationError("There can only be one CodeSignToken instance")
|
||||
|
||||
super(CodeSignToken, self).save(*args, **kwargs)
|
||||
|
||||
def __str__(self):
|
||||
return "Code signing token"
|
||||
|
||||
|
||||
class GlobalKVStore(models.Model):
|
||||
name = models.CharField(max_length=25)
|
||||
value = models.TextField()
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
|
||||
RUN_ON_CHOICES = (
|
||||
("client", "Client"),
|
||||
("site", "Site"),
|
||||
("agent", "Agent"),
|
||||
("once", "Once"),
|
||||
)
|
||||
|
||||
SCHEDULE_CHOICES = (("daily", "Daily"), ("weekly", "Weekly"), ("monthly", "Monthly"))
|
||||
|
||||
|
||||
""" class GlobalTask(models.Model):
|
||||
script = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="script",
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
script_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
custom_field = models.OneToOneField(
|
||||
"core.CustomField",
|
||||
related_name="globaltask",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
timeout = models.PositiveIntegerField(default=120)
|
||||
retcode = models.IntegerField(null=True, blank=True)
|
||||
retvalue = models.TextField(null=True, blank=True)
|
||||
stdout = models.TextField(null=True, blank=True)
|
||||
stderr = models.TextField(null=True, blank=True)
|
||||
execution_time = models.CharField(max_length=100, default="0.0000")
|
||||
run_schedule = models.CharField(
|
||||
max_length=25, choices=SCHEDULE_CHOICES, default="once"
|
||||
)
|
||||
run_on = models.CharField(
|
||||
max_length=25, choices=RUN_ON_CHOICES, default="once"
|
||||
) """
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import pytz
|
||||
from rest_framework import serializers
|
||||
|
||||
from .models import CoreSettings, CustomField
|
||||
from .models import CodeSignToken, CoreSettings, CustomField, GlobalKVStore
|
||||
|
||||
|
||||
class CoreSettingsSerializer(serializers.ModelSerializer):
|
||||
@@ -27,3 +27,15 @@ class CustomFieldSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = CustomField
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class CodeSignTokenSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = CodeSignToken
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class KeyStoreSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = GlobalKVStore
|
||||
fields = "__all__"
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
import requests
|
||||
from channels.db import database_sync_to_async
|
||||
from channels.testing import WebsocketCommunicator
|
||||
from model_bakery import baker
|
||||
@@ -7,11 +8,33 @@ from model_bakery import baker
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .consumers import DashInfo
|
||||
from .models import CoreSettings, CustomField
|
||||
from .serializers import CustomFieldSerializer
|
||||
from .models import CoreSettings, CustomField, GlobalKVStore
|
||||
from .serializers import CustomFieldSerializer, KeyStoreSerializer
|
||||
from .tasks import core_maintenance_tasks
|
||||
|
||||
|
||||
class TestCodeSign(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
self.authenticate()
|
||||
self.url = "/core/codesign/"
|
||||
|
||||
def test_get_codesign(self):
|
||||
r = self.client.get(self.url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", self.url)
|
||||
|
||||
@patch("requests.post")
|
||||
def test_edit_codesign_timeout(self, mock_post):
|
||||
mock_post.side_effect = requests.exceptions.ConnectionError()
|
||||
data = {"token": "token123"}
|
||||
r = self.client.patch(self.url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("patch", self.url)
|
||||
|
||||
|
||||
class TestConsumers(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
@@ -65,8 +88,8 @@ class TestCoreTasks(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||
def test_edit_coresettings(self, generate_all_agent_checks_task):
|
||||
@patch("automation.tasks.generate_agent_checks_task.delay")
|
||||
def test_edit_coresettings(self, generate_agent_checks_task):
|
||||
url = "/core/editsettings/"
|
||||
|
||||
# setup
|
||||
@@ -83,7 +106,7 @@ class TestCoreTasks(TacticalTestCase):
|
||||
)
|
||||
self.assertEqual(CoreSettings.objects.first().mesh_token, data["mesh_token"])
|
||||
|
||||
generate_all_agent_checks_task.assert_not_called()
|
||||
generate_agent_checks_task.assert_not_called()
|
||||
|
||||
# test adding policy
|
||||
data = {
|
||||
@@ -97,9 +120,9 @@ class TestCoreTasks(TacticalTestCase):
|
||||
CoreSettings.objects.first().workstation_policy.id, policies[0].id # type: ignore
|
||||
)
|
||||
|
||||
self.assertEqual(generate_all_agent_checks_task.call_count, 2)
|
||||
generate_agent_checks_task.assert_called_once()
|
||||
|
||||
generate_all_agent_checks_task.reset_mock()
|
||||
generate_agent_checks_task.reset_mock()
|
||||
|
||||
# test remove policy
|
||||
data = {
|
||||
@@ -109,7 +132,7 @@ class TestCoreTasks(TacticalTestCase):
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(CoreSettings.objects.first().workstation_policy, None)
|
||||
|
||||
self.assertEqual(generate_all_agent_checks_task.call_count, 1)
|
||||
self.assertEqual(generate_agent_checks_task.call_count, 1)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@@ -250,3 +273,61 @@ class TestCoreTasks(TacticalTestCase):
|
||||
self.assertFalse(CustomField.objects.filter(pk=custom_field.id).exists()) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_get_keystore(self):
|
||||
url = "/core/keystore/"
|
||||
|
||||
# setup
|
||||
keys = baker.make("core.GlobalKVStore", _quantity=2)
|
||||
|
||||
r = self.client.get(url)
|
||||
serializer = KeyStoreSerializer(keys, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(len(r.data), 2) # type: ignore
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_keystore(self):
|
||||
url = "/core/keystore/"
|
||||
|
||||
data = {"name": "test", "value": "text"}
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_update_keystore(self):
|
||||
# setup
|
||||
key = baker.make("core.GlobalKVStore")
|
||||
|
||||
# test not found
|
||||
r = self.client.put("/core/keystore/500/")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/core/keystore/{key.id}/" # type: ignore
|
||||
data = {"name": "test", "value": "text"}
|
||||
r = self.client.put(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
new_key = GlobalKVStore.objects.get(pk=key.id) # type: ignore
|
||||
self.assertEqual(new_key.name, data["name"])
|
||||
self.assertEqual(new_key.value, data["value"])
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_delete_keystore(self):
|
||||
# setup
|
||||
key = baker.make("core.GlobalKVStore")
|
||||
|
||||
# test not found
|
||||
r = self.client.delete("/core/keystore/500/")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/core/keystore/{key.id}/" # type: ignore
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.assertFalse(GlobalKVStore.objects.filter(pk=key.id).exists()) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@@ -12,4 +12,7 @@ urlpatterns = [
|
||||
path("servermaintenance/", views.server_maintenance),
|
||||
path("customfields/", views.GetAddCustomFields.as_view()),
|
||||
path("customfields/<int:pk>/", views.GetUpdateDeleteCustomFields.as_view()),
|
||||
path("codesign/", views.CodeSign.as_view()),
|
||||
path("keystore/", views.GetAddKeyStore.as_view()),
|
||||
path("keystore/<int:pk>/", views.UpdateDeleteKeyStore.as_view()),
|
||||
]
|
||||
|
||||
@@ -11,8 +11,13 @@ from rest_framework.views import APIView
|
||||
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import CoreSettings, CustomField
|
||||
from .serializers import CoreSettingsSerializer, CustomFieldSerializer
|
||||
from .models import CodeSignToken, CoreSettings, CustomField, GlobalKVStore
|
||||
from .serializers import (
|
||||
CodeSignTokenSerializer,
|
||||
CoreSettingsSerializer,
|
||||
CustomFieldSerializer,
|
||||
KeyStoreSerializer,
|
||||
)
|
||||
|
||||
|
||||
class UploadMeshAgent(APIView):
|
||||
@@ -57,14 +62,20 @@ def version(request):
|
||||
|
||||
@api_view()
|
||||
def dashboard_info(request):
|
||||
from tacticalrmm.utils import get_latest_trmm_ver
|
||||
|
||||
return Response(
|
||||
{
|
||||
"trmm_version": settings.TRMM_VERSION,
|
||||
"latest_trmm_ver": get_latest_trmm_ver(),
|
||||
"dark_mode": request.user.dark_mode,
|
||||
"show_community_scripts": request.user.show_community_scripts,
|
||||
"dbl_click_action": request.user.agent_dblclick_action,
|
||||
"default_agent_tbl_tab": request.user.default_agent_tbl_tab,
|
||||
"client_tree_sort": request.user.client_tree_sort,
|
||||
"client_tree_splitter": request.user.client_tree_splitter,
|
||||
"loading_bar_color": request.user.loading_bar_color,
|
||||
"no_code_sign": hasattr(settings, "NOCODESIGN") and settings.NOCODESIGN,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -177,3 +188,94 @@ class GetUpdateDeleteCustomFields(APIView):
|
||||
get_object_or_404(CustomField, pk=pk).delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class CodeSign(APIView):
|
||||
def get(self, request):
|
||||
token = CodeSignToken.objects.first()
|
||||
return Response(CodeSignTokenSerializer(token).data)
|
||||
|
||||
def patch(self, request):
|
||||
import requests
|
||||
|
||||
errors = []
|
||||
for url in settings.EXE_GEN_URLS:
|
||||
try:
|
||||
r = requests.post(
|
||||
f"{url}/api/v1/checktoken",
|
||||
json={"token": request.data["token"]},
|
||||
headers={"Content-type": "application/json"},
|
||||
timeout=15,
|
||||
)
|
||||
except Exception as e:
|
||||
errors.append(str(e))
|
||||
else:
|
||||
errors = []
|
||||
break
|
||||
|
||||
if errors:
|
||||
return notify_error(", ".join(errors))
|
||||
|
||||
if r.status_code == 400 or r.status_code == 401: # type: ignore
|
||||
return notify_error(r.json()["ret"]) # type: ignore
|
||||
elif r.status_code == 200: # type: ignore
|
||||
t = CodeSignToken.objects.first()
|
||||
if t is None:
|
||||
CodeSignToken.objects.create(token=request.data["token"])
|
||||
else:
|
||||
serializer = CodeSignTokenSerializer(instance=t, data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response("Token was saved")
|
||||
|
||||
try:
|
||||
ret = r.json()["ret"] # type: ignore
|
||||
except:
|
||||
ret = "Something went wrong"
|
||||
return notify_error(ret)
|
||||
|
||||
def post(self, request):
|
||||
from agents.models import Agent
|
||||
from agents.tasks import force_code_sign
|
||||
|
||||
err = "A valid token must be saved first"
|
||||
try:
|
||||
t = CodeSignToken.objects.first().token
|
||||
except:
|
||||
return notify_error(err)
|
||||
|
||||
if t is None or t == "":
|
||||
return notify_error(err)
|
||||
|
||||
pks: list[int] = list(Agent.objects.only("pk").values_list("pk", flat=True))
|
||||
force_code_sign.delay(pks=pks)
|
||||
return Response("Agents will be code signed shortly")
|
||||
|
||||
|
||||
class GetAddKeyStore(APIView):
|
||||
def get(self, request):
|
||||
keys = GlobalKVStore.objects.all()
|
||||
return Response(KeyStoreSerializer(keys, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
serializer = KeyStoreSerializer(data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class UpdateDeleteKeyStore(APIView):
|
||||
def put(self, request, pk):
|
||||
key = get_object_or_404(GlobalKVStore, pk=pk)
|
||||
|
||||
serializer = KeyStoreSerializer(instance=key, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def delete(self, request, pk):
|
||||
get_object_or_404(GlobalKVStore, pk=pk).delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -7,7 +7,7 @@ from tacticalrmm.middleware import get_debug_info, get_username
|
||||
|
||||
ACTION_TYPE_CHOICES = [
|
||||
("schedreboot", "Scheduled Reboot"),
|
||||
("taskaction", "Scheduled Task Action"),
|
||||
("taskaction", "Scheduled Task Action"), # deprecated
|
||||
("agentupdate", "Agent Update"),
|
||||
("chocoinstall", "Chocolatey Software Install"),
|
||||
]
|
||||
@@ -42,13 +42,6 @@ AUDIT_OBJECT_TYPE_CHOICES = [
|
||||
("bulk", "Bulk"),
|
||||
]
|
||||
|
||||
# taskaction details format
|
||||
# {
|
||||
# "action": "taskcreate" | "taskdelete" | "tasktoggle",
|
||||
# "value": "Enable" | "Disable" # only needed for task toggle,
|
||||
# "task_id": 1
|
||||
# }
|
||||
|
||||
STATUS_CHOICES = [
|
||||
("pending", "Pending"),
|
||||
("completed", "Completed"),
|
||||
@@ -250,8 +243,6 @@ class PendingAction(models.Model):
|
||||
if self.action_type == "schedreboot":
|
||||
obj = dt.datetime.strptime(self.details["time"], "%Y-%m-%d %H:%M:%S")
|
||||
return dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
|
||||
elif self.action_type == "taskaction":
|
||||
return "Next agent check-in"
|
||||
elif self.action_type == "agentupdate":
|
||||
return "Next update cycle"
|
||||
elif self.action_type == "chocoinstall":
|
||||
@@ -268,20 +259,6 @@ class PendingAction(models.Model):
|
||||
elif self.action_type == "chocoinstall":
|
||||
return f"{self.details['name']} software install"
|
||||
|
||||
elif self.action_type == "taskaction":
|
||||
if self.details["action"] == "taskdelete":
|
||||
return "Device pending task deletion"
|
||||
elif self.details["action"] == "taskcreate":
|
||||
return "Device pending task creation"
|
||||
elif self.details["action"] == "tasktoggle":
|
||||
# value is bool
|
||||
if self.details["value"]:
|
||||
action = "enable"
|
||||
else:
|
||||
action = "disable"
|
||||
|
||||
return f"Device pending task {action}"
|
||||
|
||||
|
||||
class BaseAuditModel(models.Model):
|
||||
# abstract base class for auditing models
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
asgiref==3.3.1
|
||||
asgiref==3.3.4
|
||||
asyncio-nats-client==0.11.4
|
||||
celery==5.0.5
|
||||
certifi==2020.12.5
|
||||
@@ -6,7 +6,8 @@ cffi==1.14.5
|
||||
channels==3.0.3
|
||||
chardet==4.0.0
|
||||
cryptography==3.4.7
|
||||
Django==3.1.7
|
||||
daphne==3.0.2
|
||||
Django==3.2.1
|
||||
django-cors-headers==3.7.0
|
||||
django-rest-knox==4.1.0
|
||||
djangorestframework==3.12.4
|
||||
@@ -26,7 +27,7 @@ redis==3.5.3
|
||||
requests==2.25.1
|
||||
six==1.15.0
|
||||
sqlparse==0.4.1
|
||||
twilio==6.55.0
|
||||
twilio==6.57.0
|
||||
urllib3==1.26.4
|
||||
uWSGI==2.0.19.1
|
||||
validators==0.18.2
|
||||
|
||||
@@ -1,249 +1,384 @@
|
||||
[
|
||||
{
|
||||
"filename": "ClearFirefoxCache.ps1",
|
||||
"guid": "6820cb5e-5a7f-4d9b-8c22-d54677e3cc04",
|
||||
"filename": "Win_Firefox_Clear_Cache.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Clear Firefox Cache",
|
||||
"name": "Firefox - Clean Cache",
|
||||
"description": "This script will clean up Mozilla Firefox for all users.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Browsers"
|
||||
},
|
||||
{
|
||||
"filename": "ClearGoogleChromeCache.ps1",
|
||||
"guid": "3ff6a386-11d1-4f9d-8cca-1b0563bb6443",
|
||||
"filename": "Win_Google_Chrome_Clear_Cache.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Clear Google Chrome Cache",
|
||||
"name": "Chrome - Clear Cache for All Users",
|
||||
"description": "This script will clean up Google Chrome for all users.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Browsers"
|
||||
},
|
||||
{
|
||||
"filename": "InstallAdobeReader.ps1",
|
||||
"guid": "be1de837-f677-4ac5-aa0c-37a0fc9991fc",
|
||||
"filename": "Win_Install_Adobe_Reader.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Install Adobe Reader DC",
|
||||
"name": "Adobe Reader DC - Install",
|
||||
"description": "Installs Adobe Reader DC.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software>Chocolatey"
|
||||
},
|
||||
{
|
||||
"guid": "2ee134d5-76aa-4160-b334-a1efbc62079f",
|
||||
"filename": "Win_Install_Duplicati.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Install Duplicati",
|
||||
"name": "Duplicati - Install",
|
||||
"description": "This script installs Duplicati 2.0.5.1 as a service.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"filename": "Reset-WindowsUpdate.ps1",
|
||||
"guid": "81cc5bcb-01bf-4b0c-89b9-0ac0f3fe0c04",
|
||||
"filename": "Win_Reset_Windows_Update.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Reset Windows Update",
|
||||
"name": "Windows Update - Reset",
|
||||
"description": "This script will reset all of the Windows Updates components to DEFAULT SETTINGS.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"filename": "Start-Cleanup.ps1",
|
||||
"guid": "8db87ff0-a9b4-4d9d-bc55-377bbcb85b6d",
|
||||
"filename": "Win_Start_Cleanup.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Cleanup C: drive",
|
||||
"name": "Disk - Cleanup C: drive",
|
||||
"description": "Cleans the C: drive's Window Temperary files, Windows SoftwareDistribution folder, the local users Temperary folder, IIS logs (if applicable) and empties the recycling bin. All deleted files will go into a log transcript in $env:TEMP. By default this script leaves files that are newer than 7 days old however this variable can be edited.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Maintenance"
|
||||
},
|
||||
{
|
||||
"filename": "WindowsDefenderFullScanBackground.ps1",
|
||||
"guid": "2f28e8c1-ae0f-4b46-a826-f513974526a3",
|
||||
"filename": "Win_Defender_FullScan_Background.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Windows Defender Full Scan",
|
||||
"name": "Defender - Full Scan",
|
||||
"description": "Runs a Windows Defender Full background scan.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"filename": "WindowsDefenderQuickScanBackground.ps1",
|
||||
"guid": "adf81ddb-3b77-415c-a89b-2ccc826b5aa7",
|
||||
"filename": "Win_Defender_QuickScan_Background.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Windows Defender Quick Scan",
|
||||
"name": "Defender - Quick Scan",
|
||||
"description": "Runs a Quick Scan using Windows Defender in the Background.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"filename": "speedtest.py",
|
||||
"guid": "3c46290b-85db-4cd2-93a2-943c8c93b3b1",
|
||||
"filename": "Speedtest.py",
|
||||
"submittedBy": "https://github.com/wh1te909",
|
||||
"name": "Speed Test",
|
||||
"description": "Runs a Speed Test",
|
||||
"shell": "python"
|
||||
"name": "Speed Test - Python",
|
||||
"description": "Runs a Speed Test using Python",
|
||||
"shell": "python",
|
||||
"category": "TRMM (Win):Network"
|
||||
},
|
||||
{
|
||||
"filename": "Rename-Installed-App.ps1",
|
||||
"guid": "9d34f482-1f0c-4b2f-b65f-a9cf3c13ef5f",
|
||||
"filename": "Win_TRMM_Rename_Installed_App.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "Rename Tactical RMM Agent",
|
||||
"name": "TacticalRMM Agent Rename",
|
||||
"description": "Updates the DisplayName registry entry for the Tactical RMM windows agent to your desired name. This script takes 1 required argument: the name you wish to set.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):TacticalRMM Related"
|
||||
},
|
||||
{
|
||||
"filename": "bitlocker_encrypted_drive_c.ps1",
|
||||
"guid": "525ae965-1dcf-4c17-92b3-5da3cf6819f5",
|
||||
"filename": "Win_Bitlocker_Encrypted_Drive_c.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Check C Drive for Bitlocker Status",
|
||||
"name": "Bitlocker - Check C Drive for Status",
|
||||
"description": "Runs a check on drive C for Bitlocker status.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "2ea35fa2-c227-4d17-a40e-4d39f252e27a",
|
||||
"filename": "Win_Bitlocker_Create_Status_Report.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Create Bitlocker Status Report",
|
||||
"name": "Bitlocker - Create Status Report",
|
||||
"description": "Creates a Bitlocker status report.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"filename": "bitlocker_retrieve_status_report.ps1",
|
||||
"guid": "9e5769c1-3873-4941-bf70-e851e0afbd6d",
|
||||
"filename": "Win_Bitlocker_Retrieve_Status_Report.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Retreive Bitlocker Status Report",
|
||||
"name": "Bitlocker - Retrieve Status Report",
|
||||
"description": "Retreives a Bitlocker status report.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "72b93487-0266-43f0-97cc-03d4c7ee0b44",
|
||||
"filename": "Win_Bitlocker_Get_Recovery_Keys.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Bitlocker - Get Recovery Keys",
|
||||
"description": "Retreives a Bitlocker Recovery Keys",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "cfa14c28-4dfc-4d4e-95ee-a380652e058d",
|
||||
"filename": "Win_Bios_Check.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Check BIOS Information",
|
||||
"name": "BIOS - Check Information",
|
||||
"description": "Retreives and reports on BIOS make, version, and date.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Hardware"
|
||||
},
|
||||
{
|
||||
"filename": "ResetHighPerformancePowerProfiletoDefaults.ps1",
|
||||
"guid": "e1c27982-b955-4766-85b6-d92527a177cf",
|
||||
"filename": "Win_Hardware_Monitor_Get_Info.ps1",
|
||||
"submittedBy": "https://github.com/MaxAnderson95/",
|
||||
"name": "Monitor - Get Info",
|
||||
"description": "Retreives and reports on Monitor info: Manufacturer, Model, Serial",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Hardware"
|
||||
},
|
||||
{
|
||||
"guid": "ae231ac4-b01f-4a39-a9d2-3d817af75260",
|
||||
"filename": "Win_Hardware_RAM_Status.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "RAM - Check Information",
|
||||
"description": "Retreives and reports on RAM info: DIMM's, total memory, slots total and used",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Hardware"
|
||||
},
|
||||
{
|
||||
"guid": "95a2ee6f-b89b-4551-856e-3081b041caa7",
|
||||
"filename": "Win_Power_Profile_Reset_High_Performance_to_Defaults.ps1",
|
||||
"submittedBy": "https://github.com/azulskyknight",
|
||||
"name": "Reset High Perf Power Profile",
|
||||
"name": "Power Profile - Reset High Perf Power Profile to defaults",
|
||||
"description": "Resets monitor, disk, standby, and hibernate timers in the default High Performance power profile to their default values. It also re-indexes the AC and DC power profiles into their default order.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Power"
|
||||
},
|
||||
{
|
||||
"filename": "SetHighPerformancePowerProfile.ps1",
|
||||
"guid": "2cbd30b0-84dd-4388-a36d-2e2e980f1a3e",
|
||||
"filename": "Win_Power_Profile_Set_High_Performance.ps1",
|
||||
"submittedBy": "https://github.com/azulskyknight",
|
||||
"name": "Set High Perf Power Profile",
|
||||
"name": "Power Profile - Set High Performance",
|
||||
"description": "Sets the High Performance Power profile to the active power profile. Use this to keep machines from falling asleep.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Power"
|
||||
},
|
||||
{
|
||||
"filename": "Windows10Upgrade.ps1",
|
||||
"guid": "553236d3-81bc-49f4-af8a-0cff925a7f6d",
|
||||
"filename": "Win_10_Upgrade.ps1",
|
||||
"submittedBy": "https://github.com/RVL-Solutions and https://github.com/darimm",
|
||||
"name": "Windows 10 Upgrade",
|
||||
"description": "Forces an upgrade to the latest release of Windows 10.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"filename": "DiskStatus.ps1",
|
||||
"guid": "375323e5-cac6-4f35-a304-bb7cef35902d",
|
||||
"filename": "Win_Disk_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Check Disks",
|
||||
"name": "Disk Hardware Health Check (using Event Viewer errors)",
|
||||
"description": "Checks local disks for errors reported in event viewer within the last 24 hours",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Hardware"
|
||||
},
|
||||
{
|
||||
"filename": "DuplicatiStatus.ps1",
|
||||
"guid": "7c14beb4-d1c3-41aa-8e70-92a267d6e080",
|
||||
"filename": "Win_Duplicati_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Check Duplicati",
|
||||
"name": "Duplicati - Check Status",
|
||||
"description": "Checks Duplicati Backup is running properly over the last 24 hours",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"filename": "EnableDefender.ps1",
|
||||
"guid": "da51111c-aff6-4d87-9d76-0608e1f67fe5",
|
||||
"filename": "Win_Defender_Enable.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Enable Windows Defender",
|
||||
"name": "Defender - Enable",
|
||||
"description": "Enables Windows Defender and sets preferences",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"filename": "OpenSSHServerInstall.ps1",
|
||||
"guid": "a223d03a-e22e-40e0-94f2-92dd8c481d14",
|
||||
"filename": "Win_Open_SSH_Server_Install.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Install SSH",
|
||||
"description": "Installs and enabled OpenSSH Server",
|
||||
"shell": "powershell"
|
||||
"name": "SSH - Install Feature and Enable",
|
||||
"description": "Installs and enabled OpenSSH Server Feature in Win10",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Windows Features"
|
||||
},
|
||||
{
|
||||
"filename": "RDP_enable.bat",
|
||||
"guid": "2435297a-6263-4e90-8688-1847400d0e22",
|
||||
"filename": "Win_RDP_enable.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Enable RDP",
|
||||
"name": "RDP - Enable",
|
||||
"description": "Enables RDP",
|
||||
"shell": "cmd"
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Windows Features"
|
||||
},
|
||||
{
|
||||
"filename": "Speedtest.ps1",
|
||||
"guid": "24f19ead-fdfe-46b4-9dcb-4cd0e12a3940",
|
||||
"filename": "Win_Speedtest.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "PS Speed Test",
|
||||
"description": "Powershell speed test (win 10 or server2016+)",
|
||||
"shell": "powershell"
|
||||
"name": "Speed Test - Powershell",
|
||||
"description": "Speed Test with Powershell(win 10 or server2016+)",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network"
|
||||
},
|
||||
{
|
||||
"filename": "SyncTime.bat",
|
||||
"guid": "a821975c-60df-4d58-8990-6cf8a55b4ee0",
|
||||
"filename": "Win_Sync_Time.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Sync DC Time",
|
||||
"name": "ADDC - Sync DC Time",
|
||||
"description": "Syncs time with domain controller",
|
||||
"shell": "cmd"
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Active Directory"
|
||||
},
|
||||
{
|
||||
"filename": "WinDefenderClearLogs.ps1",
|
||||
"guid": "b6b9912f-4274-4162-99cc-9fd47fbcb292",
|
||||
"filename": "Win_ADDC_Sync_Start.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "ADDC - Sync AD",
|
||||
"description": "Trigger AD Sync on domain controller",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Active Directory"
|
||||
},
|
||||
{
|
||||
"guid": "b720e320-7755-4c89-9992-e1a6c43699ed",
|
||||
"filename": "Win_Defender_Clear_Logs.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Clear Defender Logs",
|
||||
"name": "Defender - Clear Logs",
|
||||
"description": "Clears Windows Defender Logs",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"filename": "WinDefenderStatus.ps1",
|
||||
"guid": "d980fda3-a068-47eb-8495-1aab07a24e64",
|
||||
"filename": "Win_Defender_Status_Report.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Defender Status",
|
||||
"description": "This will check for Malware, Antispyware, that Windows Defender is Healthy, last scan etc within the last 24 hours",
|
||||
"shell": "powershell"
|
||||
"name": "Defender - Status Report",
|
||||
"description": "This will check for Malware and Antispyware within the last 24 hours and display, otherwise will report as Healthy. Command Parameter: (number) if provided will check that number of days back in the log.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"filename": "disable_FastStartup.bat",
|
||||
"guid": "9956e936-6fdb-4488-a9d8-8b274658037f",
|
||||
"filename": "Win_Disable_Fast_Startup.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Disable Fast Startup",
|
||||
"name": "Power - Fast Startup Disable",
|
||||
"description": "Disables Faststartup on Windows 10",
|
||||
"shell": "cmd"
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Power"
|
||||
},
|
||||
{
|
||||
"filename": "updatetacticalexclusion.ps1",
|
||||
"guid": "f628a02b-16c3-4ab5-b788-dec5bc2af1d9",
|
||||
"filename": "Win_Power_Disable_Hibernation.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Power - Hibernate Disable",
|
||||
"description": "Disables Hibernation",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Power"
|
||||
},
|
||||
{
|
||||
"guid": "2472bbaf-1941-4722-8a58-d1dd0f528801",
|
||||
"filename": "Win_TRMM_AV_Update_Exclusion.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "TRMM Defender Exclusions",
|
||||
"description": "Windows Defender Exclusions for Tactical RMM",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"filename": "Display_Message_To_User.ps1",
|
||||
"guid": "b253dc76-41a0-48ca-9cea-bee4277402c4",
|
||||
"filename": "Win_Display_Message_To_User.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "Display Message To User",
|
||||
"name": "Message Popup To User",
|
||||
"description": "Displays a popup message to the currently logged on user",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"filename": "VerifyAntivirus.ps1",
|
||||
"guid": "19224d21-bd39-44bc-b9cf-8f1ba3ca9c11",
|
||||
"filename": "Win_Antivirus_Verify.ps1",
|
||||
"submittedBy": "https://github.com/beejayzed",
|
||||
"name": "Verify Antivirus Status",
|
||||
"name": "Antivirus - Verify Status",
|
||||
"description": "Verify and display status for all installed Antiviruses",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"filename": "CreateAllUserLogonScript.ps1",
|
||||
"guid": "f88c5c52-c6fe-44db-b727-b7912a4279ed",
|
||||
"filename": "Win_Create_All_User_Logon_Script.ps1",
|
||||
"submittedBy": "https://github.com/nr-plaxon",
|
||||
"name": "Create User Logon Script",
|
||||
"name": "Template Example - Create User Logon Script",
|
||||
"description": "Creates a powershell script that runs at logon of any user on the machine in the security context of the user.",
|
||||
"shell": "powershell"
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "5615aa90-0272-427b-8acf-0ca019612501",
|
||||
"filename": "Win_Chocolatey_Update_Installed.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Chocolatey Update Installed Apps",
|
||||
"name": "Update Installed Apps",
|
||||
"description": "Update all apps that were installed using Chocolatey.",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):3rd Party Software>Chocolatey"
|
||||
},
|
||||
{
|
||||
"guid": "fff8024d-d72e-4457-84fa-6c780f69a16f",
|
||||
"filename": "Win_AD_Check_And_Enable_AD_Recycle_Bin.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "AD - Check and Enable AD Recycle Bin",
|
||||
"name": "ADDC - Check and Enable AD Recycle Bin",
|
||||
"description": "Only run on Domain Controllers, checks for Active Directory Recycle Bin and enables if not already enabled",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Active Directory"
|
||||
},
|
||||
{
|
||||
"filename": "Check_Events_for_Bluescreens.ps1",
|
||||
"guid": "71090fc4-faa6-460b-adb0-95d7863544e1",
|
||||
"filename": "Win_Check_Events_for_Bluescreens.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Event Viewer - Check for Bluescreens",
|
||||
"description": "This will check for Bluescreen events on your system",
|
||||
"name": "Event Viewer - Bluescreen Notification",
|
||||
"description": "Event Viewer Monitor - Notify Bluescreen events on your system",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Monitoring"
|
||||
},
|
||||
{
|
||||
"guid": "8373846f-facc-49b9-9891-3a780a394c89",
|
||||
"filename": "Win_Local_User_Created_Monitor.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Event Viewer - New User Notification",
|
||||
"description": "Event Viewer Monitor - Notify when new Local user is created",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Monitoring"
|
||||
},
|
||||
{
|
||||
"guid": "65e5cef1-8338-4180-a0bc-cd54e62de690",
|
||||
"filename": "Win_Task_Scheduler_New_Items_Monitor.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Event Viewer - Task Scheduler New Item Notification",
|
||||
"description": "Event Viewer Monitor - Notify when new Task Scheduler item is created",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Monitoring"
|
||||
},
|
||||
{
|
||||
"guid": "08ca81f2-f044-4dfc-ad47-090b19b19d76",
|
||||
"filename": "Win_User_Logged_in_with_Temp_Profile.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "User Logged in with temp profile check",
|
||||
"description": "Check if users are logged in with a temp profile",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "5d905886-9eb1-4129-8b81-a013f842eb24",
|
||||
"filename": "Win_Rename_Computer.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Rename Computer",
|
||||
@@ -253,19 +388,258 @@
|
||||
"default_timeout": 30
|
||||
},
|
||||
{
|
||||
"filename": "Finish_updates_and_restart.ps1",
|
||||
"guid": "f396dae2-c768-45c5-bd6c-176e56ed3614",
|
||||
"filename": "Win_Power_RestartorShutdown.ps1",
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "Finish updates and restart",
|
||||
"description": "Finish installing updates and restart PC",
|
||||
"name": "Power - Restart or Shutdown PC",
|
||||
"description": "Restart PC. Add parameter: shutdown if you want to shutdown computer",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"guid": "e09895d5-ca13-44a2-a38c-6e77c740f0e8",
|
||||
"filename": "Win_ScreenConnectAIO.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "ScreenConnect AIO",
|
||||
"description": "Install, Uninstall, Start and Stop ScreenConnect Access Agent",
|
||||
"args": [
|
||||
"-serviceName {{client.ScreenConnectService}}",
|
||||
"-url {{client.ScreenConnectInstaller}}",
|
||||
"-clientname {{client.name}}",
|
||||
"-sitename {{site.name}}",
|
||||
"-action install"
|
||||
],
|
||||
"default_timeout": "90",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"guid": "3abbb62a-3757-492c-8979-b4fc6174845d",
|
||||
"filename": "Win_AutoRun_Disable.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Autorun - Disable",
|
||||
"description": "Disable Autorun System Wide",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "30"
|
||||
},
|
||||
{
|
||||
"guid": "4a11877a-7555-494c-ac74-29d6df3c1989",
|
||||
"filename": "Win_Cortana_Disable.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Cortana - Disable",
|
||||
"description": "Disable Cortana System Wide",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "30"
|
||||
},
|
||||
{
|
||||
"guid": "28ef1387-dd4f-4bab-b042-26250914e370",
|
||||
"filename": "Win_WOL_Enable_Status.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "BROKEN Network WoL - Enable function",
|
||||
"description": "Wake on Lan enable on Dell, HP, Lenovo",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "685d5432-0b84-46d5-98e8-3ec2054150fe",
|
||||
"filename": "Win_WOL_Test_State.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "BROKEN Network WoL - Test State",
|
||||
"description": "Wake on Lan test status",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "abe78170-7cf9-435b-9666-c5ef6c11a106",
|
||||
"filename": "Win_Network_IPv6_Disable.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Network IPv6 - Disable",
|
||||
"description": "Disable IPv6 on all adapters",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "745bb7cd-b71a-4f2e-b6f2-c579b1828162",
|
||||
"filename": "Win_Network_DHCP_Set.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Network - Set Primary NIC to DHCP",
|
||||
"description": "Enable DHCP on primary adapter",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "83aa4d51-63ce-41e7-829f-3c16e6115bbf",
|
||||
"filename": "Win_Network_DNS_Set_to_1.1.1.2.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Network - Set all NICs to use DNS of 1.1.1.2",
|
||||
"description": "Domain computers skipped. Sets all NICs to have primary DNS server of 1.1.1.2, backup of 1.0.0.2 (Cloudflare malware blocking)",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "0caa33bc-89ca-47e0-ad4a-04626ae6384d",
|
||||
"filename": "Win_Network_TCP_Reset_Stack.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Network - Reset tcp using netsh",
|
||||
"description": "resets tcp stack using netsh",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "120"
|
||||
},
|
||||
{
|
||||
"guid": "6ce5682a-49db-4c0b-9417-609cf905ac43",
|
||||
"filename": "Win_Win10_Change_Key_and_Activate.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Product Key in Win10 Change and Activate",
|
||||
"description": "Insert new product key and Activate. Requires 1 parameter the product key you want to use",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "83f6c6ea-6120-4fd3-bec8-d3abc505dcdf",
|
||||
"filename": "Win_TRMM_Start_Menu_Delete_Shortcut.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "TacticalRMM Delete Start Menu Shortcut for App",
|
||||
"description": "Delete its application shortcut that's installed in the start menu by default",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):TacticalRMM Related",
|
||||
"default_timeout": "10"
|
||||
},
|
||||
{
|
||||
"guid": "60130fca-7636-446e-acd7-cc5d29d609c2",
|
||||
"filename": "Win_Firewall_Check_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Windows Firewall - Check Status",
|
||||
"description": "Windows Firewall - Check state, report status",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network"
|
||||
},
|
||||
{
|
||||
"guid": "93379675-c01c-433f-87df-a11597c959f0",
|
||||
"filename": "Win_UAC_Check_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Windows UAC - Check Status",
|
||||
"description": "Windows UAC - Report status",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security"
|
||||
},
|
||||
{
|
||||
"guid": "7ea6a11a-05c0-4151-b5c1-cb8af029299f",
|
||||
"filename": "Win_AzureAD_Check_Connection_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Azure AD - Check Status",
|
||||
"description": "Azure AD - Check if joined or not",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Azure>AD"
|
||||
},
|
||||
{
|
||||
"guid": "7d81859a-1ba3-42b0-8664-29844f0dd765",
|
||||
"filename": "Win_Azure_Mars_Cloud_Backup_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Azure - Mars Cloud backup Status",
|
||||
"description": "Azure - Mars Cloud backup Check Status",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Azure>Backup"
|
||||
},
|
||||
{
|
||||
"guid": "e18c64d0-b783-4b52-b44b-9bb7592b439b",
|
||||
"filename": "Win_FileSystem_Enable_Long_Paths.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "File System - Enable Long Paths",
|
||||
"description": "Enables NTFS Long paths greater than 260 characters",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "c6252ca8-5172-42ea-9114-e447f80868f5",
|
||||
"filename": "Win_USB_Disable_Access.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "USB - Disable Access",
|
||||
"description": "USB - Disable Plugged in USB devices",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "3785952f-69fb-4bda-b2fe-5e3e8642738a",
|
||||
"filename": "Win_USB_Enable_Access.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "USB - Enable Access",
|
||||
"description": "USB - Enable Plugged in USB devices",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "c6014da2-b188-4e1b-b96a-e3440ade3a6a",
|
||||
"filename": "Win_RecycleBin_Empty.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "File System - Empty Recycle Bin",
|
||||
"description": "Empty the recycle bin",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "57997ec7-b293-4fd5-9f90-a25426d0eb90",
|
||||
"filename": "Win_Get_Computer_Users.ps1",
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "Get Computer Users",
|
||||
"description": "Get list of computer users and show which one is enabled",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"filename": "Finish_updates_and_shutdown.ps1",
|
||||
"guid": "77da9c87-5a7a-4ba1-bdde-3eeb3b01d62d",
|
||||
"filename": "Win_Network_Set_To_Private.ps1",
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "Finish updates and shutdown",
|
||||
"description": "Finish installing updates and shutdown PC",
|
||||
"name": "Network Category - Set Network To Private",
|
||||
"description": "Sets current network type to Private",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network"
|
||||
},
|
||||
{
|
||||
"guid": "768f42d5-7b45-45ed-8233-254ae537aaa2",
|
||||
"filename": "Win_TaskScheduler_Add_Task.ps1",
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "Task Scheduler - Add a task",
|
||||
"description": "Add a task to Task Scheduler, needs editing",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "e371f1c6-0dd9-44de-824c-a17e1ca4c4ab",
|
||||
"filename": "Win_Outlook_SentItems_To_Delegated_Folders.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Outlook - Delegated folders set for all profiles",
|
||||
"description": "Uses RunAsUser to setup sent items for the currently logged on user on delegated folders to go into the delegated folders sent for all.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Office",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "17040742-184a-4251-8f7b-4a1b0a1f02d1",
|
||||
"filename": "Win_File_Copy_Misc.ps1",
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "EXAMPLE File Copying using powershell",
|
||||
"description": "Reference Script: Will need manual tweaking, for copying files/folders from paths/websites to local",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Misc>Reference",
|
||||
"default_timeout": "1"
|
||||
},
|
||||
{
|
||||
"guid": "168037d8-78e6-4a6a-a9a9-8ec2c1dbe949",
|
||||
"filename": "Win_MSI_Install.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "EXAMPLE Function for running MSI install via powershell",
|
||||
"description": "Reference Script: Will need manual tweaking, for running MSI from powershell",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Misc>Reference",
|
||||
"default_timeout": "1"
|
||||
}
|
||||
]
|
||||
18
api/tacticalrmm/scripts/migrations/0008_script_guid.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-15 02:10
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('scripts', '0007_script_args'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='script',
|
||||
name='guid',
|
||||
field=models.CharField(blank=True, max_length=64, null=True),
|
||||
),
|
||||
]
|
||||
@@ -1,10 +1,11 @@
|
||||
import base64
|
||||
import re
|
||||
from loguru import logger
|
||||
from typing import Any, List, Union
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from loguru import logger
|
||||
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
@@ -23,6 +24,7 @@ logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class Script(BaseAuditModel):
|
||||
guid = name = models.CharField(max_length=64, null=True, blank=True)
|
||||
name = models.CharField(max_length=255)
|
||||
description = models.TextField(null=True, blank=True)
|
||||
filename = models.CharField(max_length=255) # deprecated
|
||||
@@ -78,16 +80,14 @@ class Script(BaseAuditModel):
|
||||
|
||||
for script in info:
|
||||
if os.path.exists(os.path.join(scripts_dir, script["filename"])):
|
||||
s = cls.objects.filter(script_type="builtin").filter(
|
||||
name=script["name"]
|
||||
)
|
||||
s = cls.objects.filter(script_type="builtin", guid=script["guid"])
|
||||
|
||||
category = (
|
||||
script["category"] if "category" in script.keys() else "Community"
|
||||
)
|
||||
|
||||
default_timeout = (
|
||||
script["default_timeout"]
|
||||
int(script["default_timeout"])
|
||||
if "default_timeout" in script.keys()
|
||||
else 90
|
||||
)
|
||||
@@ -120,6 +120,46 @@ class Script(BaseAuditModel):
|
||||
"args",
|
||||
]
|
||||
)
|
||||
|
||||
# check if script was added without a guid
|
||||
elif cls.objects.filter(
|
||||
script_type="builtin", name=script["name"]
|
||||
).exists():
|
||||
s = cls.objects.get(script_type="builtin", name=script["name"])
|
||||
|
||||
if not s.guid:
|
||||
print(f"Updating GUID for: {script['name']}")
|
||||
s.guid = script["guid"]
|
||||
s.name = script["name"]
|
||||
s.description = script["description"]
|
||||
s.category = category
|
||||
s.shell = script["shell"]
|
||||
s.default_timeout = default_timeout
|
||||
s.args = args
|
||||
|
||||
with open(
|
||||
os.path.join(scripts_dir, script["filename"]), "rb"
|
||||
) as f:
|
||||
script_bytes = (
|
||||
f.read().decode("utf-8").encode("ascii", "ignore")
|
||||
)
|
||||
s.code_base64 = base64.b64encode(script_bytes).decode(
|
||||
"ascii"
|
||||
)
|
||||
|
||||
s.save(
|
||||
update_fields=[
|
||||
"guid",
|
||||
"name",
|
||||
"description",
|
||||
"category",
|
||||
"default_timeout",
|
||||
"code_base64",
|
||||
"shell",
|
||||
"args",
|
||||
]
|
||||
)
|
||||
|
||||
else:
|
||||
print(f"Adding new community script: {script['name']}")
|
||||
|
||||
@@ -131,6 +171,7 @@ class Script(BaseAuditModel):
|
||||
|
||||
cls(
|
||||
code_base64=code_base64,
|
||||
guid=script["guid"],
|
||||
name=script["name"],
|
||||
description=script["description"],
|
||||
filename=script["filename"],
|
||||
@@ -141,6 +182,9 @@ class Script(BaseAuditModel):
|
||||
args=args,
|
||||
).save()
|
||||
|
||||
# delete community scripts that had their name changed
|
||||
cls.objects.filter(script_type="builtin", guid=None).delete()
|
||||
|
||||
@staticmethod
|
||||
def serialize(script):
|
||||
# serializes the script and returns json
|
||||
@@ -152,9 +196,9 @@ class Script(BaseAuditModel):
|
||||
def parse_script_args(
|
||||
cls, agent, shell: str, args: List[str] = list()
|
||||
) -> Union[List[str], None]:
|
||||
from core.models import CustomField
|
||||
from core.models import CustomField, GlobalKVStore
|
||||
|
||||
if not list:
|
||||
if not args:
|
||||
return []
|
||||
|
||||
temp_args = list()
|
||||
@@ -176,6 +220,18 @@ class Script(BaseAuditModel):
|
||||
# ignore arg since it is invalid
|
||||
continue
|
||||
|
||||
# value is in the global keystore and replace value
|
||||
if temp[0] == "global":
|
||||
if GlobalKVStore.objects.filter(name=temp[1]).exists():
|
||||
value = GlobalKVStore.objects.get(name=temp[1]).value
|
||||
temp_args.append(
|
||||
re.sub("\\{\\{.*\\}\\}", "'" + value + "'", arg)
|
||||
)
|
||||
continue
|
||||
else:
|
||||
# ignore since value doesn't exist
|
||||
continue
|
||||
|
||||
if temp[0] == "client":
|
||||
model = "client"
|
||||
obj = agent.client
|
||||
@@ -219,7 +275,7 @@ class Script(BaseAuditModel):
|
||||
# replace the value in the arg and push to array
|
||||
# log any unhashable type errors
|
||||
try:
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg)) # type: ignore
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", "'" + value + "'", arg)) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
continue
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import json
|
||||
import os
|
||||
from email.policy import default
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
@@ -206,6 +207,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
) as f:
|
||||
info = json.load(f)
|
||||
|
||||
guids = []
|
||||
for script in info:
|
||||
fn: str = script["filename"]
|
||||
self.assertTrue(os.path.exists(os.path.join(scripts_dir, fn)))
|
||||
@@ -222,6 +224,19 @@ class TestScriptViews(TacticalTestCase):
|
||||
elif fn.endswith(".py"):
|
||||
self.assertEqual(script["shell"], "python")
|
||||
|
||||
if "args" in script.keys():
|
||||
self.assertIsInstance(script["args"], list)
|
||||
|
||||
# allows strings as long as they can be type casted to int
|
||||
if "default_timeout" in script.keys():
|
||||
self.assertIsInstance(int(script["default_timeout"]), int)
|
||||
|
||||
self.assertIn("guid", script.keys())
|
||||
guids.append(script["guid"])
|
||||
|
||||
# check guids are unique
|
||||
self.assertEqual(len(guids), len(set(guids)))
|
||||
|
||||
def test_load_community_scripts(self):
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "scripts/community_scripts.json")
|
||||
@@ -230,12 +245,39 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
Script.load_community_scripts()
|
||||
|
||||
community_scripts = Script.objects.filter(script_type="builtin").count()
|
||||
self.assertEqual(len(info), community_scripts)
|
||||
community_scripts_count = Script.objects.filter(script_type="builtin").count()
|
||||
if len(info) != community_scripts_count:
|
||||
raise Exception(
|
||||
f"There are {len(info)} scripts in json file but only {community_scripts_count} in database"
|
||||
)
|
||||
|
||||
# test updating already added community scripts
|
||||
Script.load_community_scripts()
|
||||
self.assertEqual(len(info), community_scripts)
|
||||
community_scripts_count2 = Script.objects.filter(script_type="builtin").count()
|
||||
self.assertEqual(len(info), community_scripts_count2)
|
||||
|
||||
def test_community_script_has_jsonfile_entry(self):
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "scripts/community_scripts.json")
|
||||
) as f:
|
||||
info = json.load(f)
|
||||
|
||||
filenames = [i["filename"] for i in info]
|
||||
|
||||
# normal
|
||||
if not settings.DOCKER_BUILD:
|
||||
scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], "scripts")
|
||||
# docker
|
||||
else:
|
||||
scripts_dir = settings.SCRIPTS_DIR
|
||||
|
||||
with os.scandir(scripts_dir) as it:
|
||||
for f in it:
|
||||
if not f.name.startswith(".") and f.is_file():
|
||||
if f.name not in filenames:
|
||||
raise Exception(
|
||||
f"{f.name} is missing an entry in community_scripts.json"
|
||||
)
|
||||
|
||||
def test_script_filenames_do_not_contain_spaces(self):
|
||||
with open(
|
||||
@@ -244,4 +286,5 @@ class TestScriptViews(TacticalTestCase):
|
||||
info = json.load(f)
|
||||
for script in info:
|
||||
fn: str = script["filename"]
|
||||
self.assertTrue(" " not in fn)
|
||||
if " " in fn:
|
||||
raise Exception(f"{fn} must not contain spaces in filename")
|
||||
|
||||
@@ -15,28 +15,34 @@ EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
|
||||
AUTH_USER_MODEL = "accounts.User"
|
||||
|
||||
# latest release
|
||||
TRMM_VERSION = "0.5.0"
|
||||
TRMM_VERSION = "0.6.8"
|
||||
|
||||
# bump this version everytime vue code is changed
|
||||
# to alert user they need to manually refresh their browser
|
||||
APP_VER = "0.0.126"
|
||||
APP_VER = "0.0.134"
|
||||
|
||||
# https://github.com/wh1te909/rmmagent
|
||||
LATEST_AGENT_VER = "1.4.13"
|
||||
LATEST_AGENT_VER = "1.5.5"
|
||||
|
||||
MESH_VER = "0.7.93"
|
||||
MESH_VER = "0.8.19"
|
||||
|
||||
# for the update script, bump when need to recreate venv or npm install
|
||||
PIP_VER = "14"
|
||||
NPM_VER = "13"
|
||||
PIP_VER = "16"
|
||||
NPM_VER = "15"
|
||||
|
||||
SETUPTOOLS_VER = "56.1.0"
|
||||
WHEEL_VER = "0.36.2"
|
||||
|
||||
DL_64 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}.exe"
|
||||
DL_32 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}-x86.exe"
|
||||
|
||||
EXE_GEN_URLS = [
|
||||
"https://exe.tacticalrmm.io/api/v1/exe",
|
||||
"https://exe2.tacticalrmm.io",
|
||||
"https://exe.tacticalrmm.io",
|
||||
]
|
||||
|
||||
DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
|
||||
|
||||
ASGI_APPLICATION = "tacticalrmm.asgi.application"
|
||||
|
||||
try:
|
||||
|
||||
@@ -12,6 +12,7 @@ from .utils import (
|
||||
generate_winagent_exe,
|
||||
get_bit_days,
|
||||
reload_nats,
|
||||
run_nats_api_cmd,
|
||||
)
|
||||
|
||||
|
||||
@@ -65,8 +66,7 @@ class TestUtils(TestCase):
|
||||
mock_subprocess.assert_not_called()
|
||||
|
||||
@override_settings(
|
||||
ALLOWED_HOSTS=["api.example.com"],
|
||||
SECRET_KEY="sekret",
|
||||
ALLOWED_HOSTS=["api.example.com"], SECRET_KEY="sekret", DOCKER_BUILD=False
|
||||
)
|
||||
@patch("subprocess.run")
|
||||
def test_reload_nats(self, mock_subprocess):
|
||||
@@ -74,6 +74,12 @@ class TestUtils(TestCase):
|
||||
|
||||
mock_subprocess.assert_called_once()
|
||||
|
||||
@patch("subprocess.run")
|
||||
def test_run_nats_api_cmd(self, mock_subprocess):
|
||||
ids = ["a", "b", "c"]
|
||||
_ = run_nats_api_cmd("monitor", ids)
|
||||
mock_subprocess.assert_called_once()
|
||||
|
||||
def test_bitdays_to_string(self):
|
||||
a = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"]
|
||||
all_days = [
|
||||
|
||||
@@ -4,6 +4,7 @@ import string
|
||||
import subprocess
|
||||
import tempfile
|
||||
import time
|
||||
import urllib.parse
|
||||
from typing import Union
|
||||
|
||||
import pytz
|
||||
@@ -19,6 +20,7 @@ from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CodeSignToken
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
@@ -50,12 +52,27 @@ def generate_winagent_exe(
|
||||
file_name: str,
|
||||
) -> Union[Response, FileResponse]:
|
||||
|
||||
from agents.utils import get_exegen_url
|
||||
|
||||
inno = (
|
||||
f"winagent-v{settings.LATEST_AGENT_VER}.exe"
|
||||
if arch == "64"
|
||||
else f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe"
|
||||
)
|
||||
|
||||
try:
|
||||
codetoken = CodeSignToken.objects.first().token
|
||||
base_url = get_exegen_url() + "/api/v1/winagents/?"
|
||||
params = {
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
"arch": arch,
|
||||
"token": codetoken,
|
||||
}
|
||||
dl_url = base_url + urllib.parse.urlencode(params)
|
||||
except:
|
||||
codetoken = ""
|
||||
dl_url = settings.DL_64 if arch == "64" else settings.DL_32
|
||||
|
||||
data = {
|
||||
"client": client,
|
||||
"site": site,
|
||||
@@ -66,8 +83,9 @@ def generate_winagent_exe(
|
||||
"goarch": "amd64" if arch == "64" else "386",
|
||||
"token": token,
|
||||
"inno": inno,
|
||||
"url": settings.DL_64 if arch == "64" else settings.DL_32,
|
||||
"url": dl_url,
|
||||
"api": api,
|
||||
"codesigntoken": codetoken,
|
||||
}
|
||||
headers = {"Content-type": "application/json"}
|
||||
|
||||
@@ -76,7 +94,7 @@ def generate_winagent_exe(
|
||||
for url in settings.EXE_GEN_URLS:
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
f"{url}/api/v1/exe",
|
||||
json=data,
|
||||
headers=headers,
|
||||
stream=True,
|
||||
@@ -228,3 +246,37 @@ class KnoxAuthMiddlewareInstance:
|
||||
KnoxAuthMiddlewareStack = lambda inner: KnoxAuthMiddlewareInstance(
|
||||
AuthMiddlewareStack(inner)
|
||||
)
|
||||
|
||||
|
||||
def run_nats_api_cmd(mode: str, ids: list[str], timeout: int = 30) -> None:
|
||||
config = {
|
||||
"key": settings.SECRET_KEY,
|
||||
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||
"agents": ids,
|
||||
}
|
||||
with tempfile.NamedTemporaryFile() as fp:
|
||||
with open(fp.name, "w") as f:
|
||||
json.dump(config, f)
|
||||
|
||||
cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", mode]
|
||||
try:
|
||||
subprocess.run(cmd, capture_output=True, timeout=timeout)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
|
||||
def get_latest_trmm_ver() -> str:
|
||||
url = "https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/api/tacticalrmm/tacticalrmm/settings.py"
|
||||
try:
|
||||
r = requests.get(url, timeout=5)
|
||||
except:
|
||||
return "error"
|
||||
|
||||
try:
|
||||
for line in r.text.splitlines():
|
||||
if "TRMM_VERSION" in line:
|
||||
return line.split(" ")[2].strip('"')
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
return "error"
|
||||
|
||||
@@ -14,8 +14,29 @@ class TestWinUpdateViews(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_get_winupdates(self):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_run_update_scan(self, nats_cmd):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
url = f"/winupdate/{agent.pk}/runupdatescan/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
nats_cmd.assert_called_with({"func": "getwinupdates"}, wait=False)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_install_updates(self, nats_cmd):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
baker.make("winupdate.WinUpdate", agent=agent, _quantity=4)
|
||||
baker.make("winupdate.WinUpdatePolicy", agent=agent)
|
||||
url = f"/winupdate/{agent.pk}/installnow/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
nats_cmd.assert_called_once()
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_winupdates(self):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
baker.make("winupdate.WinUpdate", agent=agent, _quantity=4)
|
||||
|
||||
@@ -27,8 +48,8 @@ class TestWinUpdateViews(TacticalTestCase):
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = UpdateSerializer(agent)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data["winupdates"]), 4)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
self.assertEqual(len(resp.data["winupdates"]), 4) # type: ignore
|
||||
self.assertEqual(resp.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -99,7 +120,7 @@ class TestWinUpdateViews(TacticalTestCase):
|
||||
resp = self.client.patch(url, invalid_data, format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
data = {"pk": winupdate.pk, "policy": "inherit"}
|
||||
data = {"pk": winupdate.pk, "policy": "inherit"} # type: ignore
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
import asyncio
|
||||
|
||||
from django.shortcuts import get_object_or_404
|
||||
from packaging import version as pyver
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
|
||||
from agents.models import Agent
|
||||
from tacticalrmm.utils import get_default_timezone, notify_error
|
||||
from tacticalrmm.utils import get_default_timezone
|
||||
|
||||
from .models import WinUpdate
|
||||
from .serializers import UpdateSerializer
|
||||
@@ -24,9 +23,6 @@ def get_win_updates(request, pk):
|
||||
def run_update_scan(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
agent.delete_superseded_updates()
|
||||
if pyver.parse(agent.version) < pyver.parse("1.3.0"):
|
||||
return notify_error("Requires agent version 1.3.0 or greater")
|
||||
|
||||
asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False))
|
||||
return Response("ok")
|
||||
|
||||
@@ -35,9 +31,6 @@ def run_update_scan(request, pk):
|
||||
def install_updates(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
agent.delete_superseded_updates()
|
||||
if pyver.parse(agent.version) < pyver.parse("1.3.0"):
|
||||
return notify_error("Requires agent version 1.3.0 or greater")
|
||||
|
||||
agent.approve_updates()
|
||||
nats_data = {
|
||||
"func": "installwinupdates",
|
||||
|
||||
@@ -27,7 +27,7 @@ jobs:
|
||||
source env/bin/activate
|
||||
cd /myagent/_work/1/s/api/tacticalrmm
|
||||
pip install --no-cache-dir --upgrade pip
|
||||
pip install --no-cache-dir setuptools==53.0.0 wheel==0.36.2
|
||||
pip install --no-cache-dir setuptools==54.2.0 wheel==0.36.2
|
||||
pip install --no-cache-dir -r requirements.txt -r requirements-test.txt -r requirements-dev.txt
|
||||
displayName: "Install Python Dependencies"
|
||||
|
||||
|
||||
@@ -29,15 +29,15 @@ function check_tactical_ready {
|
||||
# tactical-init
|
||||
if [ "$1" = 'tactical-init' ]; then
|
||||
|
||||
mkdir -p ${TACTICAL_DIR}/tmp
|
||||
mkdir -p ${TACTICAL_DIR}/scripts/userdefined
|
||||
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/exe
|
||||
|
||||
test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"
|
||||
|
||||
# copy container data to volume
|
||||
rsync -a --no-perms --no-owner --delete --exclude "tmp/*" --exclude "certs/*" --exclude="api/tacticalrmm/private/*" "${TACTICAL_TMP_DIR}/" "${TACTICAL_DIR}/"
|
||||
|
||||
mkdir -p ${TACTICAL_DIR}/tmp
|
||||
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/exe
|
||||
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/logs
|
||||
|
||||
until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
|
||||
echo "waiting for postgresql container to be ready..."
|
||||
sleep 5
|
||||
|
||||
@@ -22,6 +22,7 @@ volumes:
|
||||
services:
|
||||
# postgres database for api service
|
||||
tactical-postgres:
|
||||
container_name: trmm-postgres
|
||||
image: postgres:13-alpine
|
||||
restart: always
|
||||
environment:
|
||||
@@ -35,6 +36,7 @@ services:
|
||||
|
||||
# redis container for celery tasks
|
||||
tactical-redis:
|
||||
container_name: trmm-redis
|
||||
image: redis:6.0-alpine
|
||||
restart: always
|
||||
networks:
|
||||
@@ -42,6 +44,7 @@ services:
|
||||
|
||||
# used to initialize the docker environment
|
||||
tactical-init:
|
||||
container_name: trmm-init
|
||||
image: ${IMAGE_REPO}tactical:${VERSION}
|
||||
restart: on-failure
|
||||
command: ["tactical-init"]
|
||||
@@ -65,6 +68,7 @@ services:
|
||||
|
||||
# nats
|
||||
tactical-nats:
|
||||
container_name: trmm-nats
|
||||
image: ${IMAGE_REPO}tactical-nats:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
@@ -80,6 +84,7 @@ services:
|
||||
|
||||
# meshcentral container
|
||||
tactical-meshcentral:
|
||||
container_name: trmm-meshcentral
|
||||
image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
@@ -101,6 +106,7 @@ services:
|
||||
|
||||
# mongodb container for meshcentral
|
||||
tactical-mongodb:
|
||||
container_name: trmm-mongodb
|
||||
image: mongo:4.4
|
||||
restart: always
|
||||
environment:
|
||||
@@ -114,6 +120,7 @@ services:
|
||||
|
||||
# container that hosts vue frontend
|
||||
tactical-frontend:
|
||||
container_name: trmm-frontend
|
||||
image: ${IMAGE_REPO}tactical-frontend:${VERSION}
|
||||
restart: always
|
||||
networks:
|
||||
@@ -123,6 +130,7 @@ services:
|
||||
|
||||
# container for django backend
|
||||
tactical-backend:
|
||||
container_name: trmm-backend
|
||||
image: ${IMAGE_REPO}tactical:${VERSION}
|
||||
command: ["tactical-backend"]
|
||||
restart: always
|
||||
@@ -135,8 +143,9 @@ services:
|
||||
depends_on:
|
||||
- tactical-postgres
|
||||
|
||||
# container for django backend
|
||||
# container for django websockets connections
|
||||
tactical-websockets:
|
||||
container_name: trmm-websockets
|
||||
image: ${IMAGE_REPO}tactical:${VERSION}
|
||||
command: ["tactical-websockets"]
|
||||
restart: always
|
||||
@@ -150,8 +159,9 @@ services:
|
||||
- tactical-postgres
|
||||
- tactical-backend
|
||||
|
||||
tactical-nginx:
|
||||
# container for tactical reverse proxy
|
||||
tactical-nginx:
|
||||
container_name: trmm-nginx
|
||||
image: ${IMAGE_REPO}tactical-nginx:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
@@ -171,6 +181,7 @@ services:
|
||||
|
||||
# container for celery worker service
|
||||
tactical-celery:
|
||||
container_name: trmm-celery
|
||||
image: ${IMAGE_REPO}tactical:${VERSION}
|
||||
command: ["tactical-celery"]
|
||||
restart: always
|
||||
@@ -186,6 +197,7 @@ services:
|
||||
|
||||
# container for celery beat service
|
||||
tactical-celerybeat:
|
||||
container_name: trmm-celerybeat
|
||||
image: ${IMAGE_REPO}tactical:${VERSION}
|
||||
command: ["tactical-celerybeat"]
|
||||
restart: always
|
||||
|
||||
@@ -4,7 +4,7 @@ A backup script is provided for quick and easy way to backup all settings into o
|
||||
|
||||
Download the backup script:
|
||||
```bash
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh
|
||||
wget -N https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh
|
||||
```
|
||||
|
||||
From the Web UI, click **Tools > Server Maintenance**
|
||||
|
||||
22
docs/docs/code_signing.md
Normal file
@@ -0,0 +1,22 @@
|
||||
# Code Signing
|
||||
|
||||
*Version added: Tactical RMM v0.6.0 / Agent v1.5.0*
|
||||
|
||||
Tactical RMM agents are now [code signed](https://comodosslstore.com/resources/what-is-microsoft-authenticode-code-signing-certificate/)!
|
||||
|
||||
To get access to code signed agents, you must be a [Github Sponsor](https://github.com/sponsors/wh1te909) with a minumum monthly donation of $50.00
|
||||
|
||||
Once you have become a sponsor, please email **support@amidaware.com** with your Github username (and Discord username if you're on our [Discord](https://discord.gg/upGTkWp))
|
||||
|
||||
Please allow up to 24 hours for a response
|
||||
|
||||
You will then be sent a code signing auth token, which you should enter into Tactical's web UI from *Settings > Code Signing*
|
||||
|
||||
|
||||
## How does it work?
|
||||
|
||||
Everytime you generate an agent or an agent does a self-update, your self-hosted instance sends a request to Tactical's code signing servers with your auth token.
|
||||
|
||||
If the token is valid, the server sends you back a code signed agent. If not, it sends you back the un-signed agent.
|
||||
|
||||
If you think your auth token has been compromised or stolen then please email support or contact wh1te909 on discord to get a new token / invalidate the old one.
|
||||
132
docs/docs/contributing_community_scripts.md
Normal file
@@ -0,0 +1,132 @@
|
||||
## Script Library Naming Conventions
|
||||
|
||||
### File names
|
||||
|
||||
Under `/scripts` the file name should generally follow this format:
|
||||
|
||||
```
|
||||
(Platform)_(Category or Function)_(What It Does).xxx
|
||||
```
|
||||
|
||||
!!!info
|
||||
Although Tactical RMM only has a Windows agent for now, we're planning for a future with more platform support
|
||||
|
||||
Platform for now are:
|
||||
|
||||
```
|
||||
Win
|
||||
OSX
|
||||
Linux
|
||||
iOS
|
||||
Android
|
||||
```
|
||||
|
||||
|
||||
Good filename examples include:
|
||||
|
||||
```
|
||||
Win_Azure_Mars_Cloud_Backup_Status.ps1
|
||||
Win_AzureAD_Check_Connection_Status.ps1
|
||||
Win_Network_DHCP_Set.bat
|
||||
Win_Network_DNS_Set_to_1.1.1.2.ps1
|
||||
```
|
||||
|
||||
!!!info
|
||||
This is so that at a glance you can see if there is already a script with that function, and you can avoid duplication of functionality. If you can improve a script or allow Script Arguments/Parameters update existing if possible
|
||||
|
||||
### Name field (in community_scripts.json)
|
||||
|
||||
Consider they are viewed in 3 different locations:
|
||||
|
||||
Script Manager
|
||||
|
||||
- List View (sortable by any column)
|
||||
|
||||
- Folder View (Grouped by Categories)
|
||||
|
||||
Run or Add script
|
||||
|
||||
- Running scripts manually or adding tasks (or adding in Automation Manager)
|
||||
|
||||
!!!info
|
||||
A good max length is 50-60 chars or less for display in these 3 locations
|
||||
|
||||
Make sure your Name roughly follows the order of file naming as above
|
||||
|
||||
```
|
||||
Category or Function - What It Does
|
||||
```
|
||||
|
||||
Consider how the alphabetic sort will affect display
|
||||
|
||||

|
||||
|
||||
## Making Script Files
|
||||
|
||||
### Good Habits
|
||||
|
||||
- Try and make them fully self-contained.
|
||||
|
||||
- If they pull data from elsewhere, create comment notes at the top with references for others to audit/validate
|
||||
|
||||
- Good folder locations to use for standardized things:
|
||||
```
|
||||
c:\ProgramData\TacticalRMM\
|
||||
c:\ProgramData\TacticalRMM\scripts
|
||||
c:\ProgramData\TacticalRMM\toolbox
|
||||
c:\ProgramData\TacticalRMM\logs
|
||||
c:\ProgramData\TacticalRMM\temp
|
||||
c:\ProgramData\TacticalRMM\
|
||||
```
|
||||
|
||||
- Command Parameters are good. Optional command parameters for extra functions are better.
|
||||
|
||||
- Add standardized Comment headers to scripts (include the first 2, more if appropriate):
|
||||
```powershell
|
||||
<#
|
||||
.Synopsis
|
||||
Short description
|
||||
eg Check IP address
|
||||
.DESCRIPTION
|
||||
Long description
|
||||
eg Checks IP address on all local network adapters, and returns results
|
||||
.EXAMPLE
|
||||
Example of how to use this cmdlet
|
||||
.EXAMPLE
|
||||
Another example of how to use this cmdlet
|
||||
.INPUTS
|
||||
Inputs to this cmdlet (if any)
|
||||
.OUTPUTS
|
||||
Output from this cmdlet (if any)
|
||||
.NOTES
|
||||
General notes
|
||||
eg v1.0
|
||||
.COMPONENT
|
||||
The component this cmdlet belongs to
|
||||
.ROLE
|
||||
The role this cmdlet belongs to
|
||||
.FUNCTIONALITY
|
||||
The functionality that best describes this cmdlet
|
||||
#>
|
||||
```
|
||||
|
||||
### Bad Habits
|
||||
|
||||
- Assumes non-standard configurations
|
||||
|
||||
- Doesn't play well with other community scripts (reused names etc.)
|
||||
|
||||
|
||||
## Useful Reference Script Examples
|
||||
|
||||
RunAsUser (since Tactical RMM runs as system)
|
||||
[https://github.com/wh1te909/tacticalrmm/blob/develop/scripts/Win_Display_Message_To_User.ps1](https://github.com/wh1te909/tacticalrmm/blob/develop/scripts/Win_Display_Message_To_User.ps1)
|
||||
|
||||
Command Paramater Ninja
|
||||
[https://github.com/wh1te909/tacticalrmm/blob/develop/scripts/Win_ScreenConnectAIO.ps1](https://github.com/wh1te909/tacticalrmm/blob/develop/scripts/Win_ScreenConnectAIO.ps1)
|
||||
|
||||
Optional Command Parameters and testing for errors
|
||||
[https://github.com/wh1te909/tacticalrmm/blob/develop/scripts/Win_Rename_Computer.ps1](https://github.com/wh1te909/tacticalrmm/blob/develop/scripts/Win_Rename_Computer.ps1)
|
||||
|
||||
|
||||
|
||||
113
docs/docs/contributing_using_devbox.md
Normal file
@@ -0,0 +1,113 @@
|
||||
|
||||
Hidden docs, needs work
|
||||
|
||||
For local Hyper-v Devbox notes
|
||||
|
||||
From https://raw.githubusercontent.com/silversword411/tacticalrmm-devdocs
|
||||
|
||||
Needs an official install_devbox.sh script
|
||||
|
||||
# Setup local devbox in hyper-v VM
|
||||
|
||||
|
||||
|
||||
|
||||
## Install Ubuntu 20.04 LTS
|
||||
Don't forget to
|
||||
|
||||
```bash
|
||||
sudo apt-get updates && sudo apt-get upgrade
|
||||
```
|
||||
|
||||
### Optional
|
||||
Set all users in sudo group not to require password every time:
|
||||
|
||||
```bash
|
||||
sudo visudo
|
||||
```
|
||||
|
||||
Add this:
|
||||
|
||||
```
|
||||
%sudo ALL=(ALL) NOPASSWD: ALL
|
||||
```
|
||||
|
||||
## Download customized install script and tweak
|
||||
|
||||
Create folder to dump into
|
||||
|
||||
```bash
|
||||
sudo mkdir /rmm
|
||||
sudo chown ${USER}:${USER} -R /rmm
|
||||
cd /rmm
|
||||
```
|
||||
|
||||
Get dev install script
|
||||
```bash
|
||||
wget https://raw.githubusercontent.com/silversword411/tacticalrmm-devdocs/blob/main/install_devbox.sh
|
||||
```
|
||||
|
||||
Edit, and search for `REPLACEMEWITHYOURFORKEDREPOURL`
|
||||
|
||||
and replace with your forked repo URL (example commented out below)
|
||||
|
||||
## Run it
|
||||
|
||||
```bash
|
||||
./install_devbox.sh
|
||||
```
|
||||
## Watch for
|
||||
|
||||

|
||||
|
||||
!!!Note Unlike regular installs, don't worry about the QR code
|
||||
|
||||
## Celebrate
|
||||
|
||||

|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# Misc commands
|
||||
|
||||
### Start mkdocs on dev box
|
||||
|
||||
```bash
|
||||
cd /rmm/api
|
||||
source env/bin/activate
|
||||
pip install --upgrade pip
|
||||
pip install --upgrade setuptools wheel
|
||||
pip install -r tacticalrmm/requirements-dev.txt
|
||||
cd /rmm/docs
|
||||
mkdocs serve
|
||||
```
|
||||
|
||||
### Running tests locally
|
||||
|
||||
Prep and update
|
||||
|
||||
```bash
|
||||
source /rmm/api/env/bin/activate
|
||||
cd /rmm/api/tacticalrmm
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
Then run tests
|
||||
|
||||
```bash
|
||||
python manage.py test
|
||||
```
|
||||
|
||||
## Misc Notes
|
||||
|
||||
### Spinning up front end web interface in development
|
||||
|
||||
|
||||
```bash
|
||||
cd /web
|
||||
npm run serve
|
||||
```
|
||||
59
docs/docs/contributing_using_docker.md
Normal file
@@ -0,0 +1,59 @@
|
||||
|
||||
|
||||
## Install WSL2
|
||||
|
||||
https://docs.microsoft.com/en-us/windows/wsl/install-win10
|
||||
|
||||
|
||||
## Install Docker Desktop
|
||||
|
||||
https://www.docker.com/products/docker-desktop
|
||||
|
||||
### Configure Docker
|
||||
|
||||
Make sure it doesn't look like this
|
||||

|
||||
|
||||
This is better
|
||||
|
||||

|
||||
|
||||
### Check and make sure WSL is v2 and set Ubuntu as default
|
||||
|
||||
[https://docs.microsoft.com/en-us/windows/wsl/install-win10#set-your-distribution-version-to-wsl-1-or-wsl-2](https://docs.microsoft.com/en-us/windows/wsl/install-win10#set-your-distribution-version-to-wsl-1-or-wsl-2)
|
||||
|
||||

|
||||
|
||||
## Create .env file
|
||||
|
||||
Under .devcontainer duplicate
|
||||
|
||||
```
|
||||
.env.example
|
||||
```
|
||||
|
||||
as
|
||||
|
||||
```
|
||||
.env
|
||||
```
|
||||
|
||||
Customize to your tastes (it doesn't need to be internet configured, just add records in your `hosts` file) eg
|
||||
|
||||
```
|
||||
127.0.0.1 rmm.example.com
|
||||
127.0.0.1 api.example.com
|
||||
127.0.0.1 mesh.example.com
|
||||
```
|
||||
|
||||
## View mkdocks live edits in browser
|
||||
|
||||
Change stuff in `/docs/docs/`
|
||||
|
||||
mkdocs is Exposed on Port: 8005
|
||||
|
||||
Open: [http://rmm.example.com:8005/](http://rmm.example.com:8005/)
|
||||
|
||||
## View django administration
|
||||
|
||||
Open: [http://rmm.example.com:8000/admin/](http://rmm.example.com:8000/admin/)
|
||||
118
docs/docs/contributing_using_vscode.md
Normal file
@@ -0,0 +1,118 @@
|
||||
|
||||
## Getting Started
|
||||
|
||||
### 1. Install vscode
|
||||
[https://code.visualstudio.com/download](https://code.visualstudio.com/download)
|
||||
|
||||
### 2. Fork Project in Github
|
||||
|
||||
This is making a duplicate of the code under your Github that you can edit
|
||||
|
||||
[https://github.com/wh1te909/tacticalrmm](https://github.com/wh1te909/tacticalrmm)
|
||||
|
||||

|
||||
|
||||
### 3. Add your (forked) repo to vscode
|
||||
|
||||
Clone repository
|
||||
|
||||
Login to your Github
|
||||
|
||||
Choose local folder
|
||||
|
||||
#### 3a. Install extra vscode Extensions
|
||||
|
||||
GitLens
|
||||
|
||||
Remote - SSH
|
||||
|
||||
### 4. Open Terminal
|
||||
|
||||
[https://code.visualstudio.com/docs/editor/integrated-terminal](https://code.visualstudio.com/docs/editor/integrated-terminal)
|
||||
|
||||
```
|
||||
Ctrl+`
|
||||
```
|
||||
|
||||
### 5. Configure a remote for your fork (in vscode)
|
||||
|
||||
[https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/configuring-a-remote-for-a-fork](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/configuring-a-remote-for-a-fork)
|
||||
|
||||
Configure your local fork and tell it where the original code repo is so you can compare and merge updates later when official repo is updated
|
||||
|
||||
Check repos
|
||||
|
||||
```
|
||||
git remote -v
|
||||
```
|
||||
|
||||
Add upstream repo
|
||||
|
||||
```
|
||||
git remote add upstream https://github.com/wh1te909/tacticalrmm
|
||||
```
|
||||
|
||||
Confirm changes
|
||||
|
||||
```
|
||||
git remote -v
|
||||
```
|
||||
|
||||
|
||||
### 6. Contribute code
|
||||
|
||||
Make changes to something.
|
||||
|
||||
`Commit` (update something) and notate what you did
|
||||
|
||||
`Push` (from your local vscode to your github fork)
|
||||
|
||||
Open browser and look at your repo (It should reflect your commit)
|
||||
|
||||
|
||||
#### 6a. Request your changes to be pulled into the primary repo (Pull Request)
|
||||
|
||||

|
||||
|
||||
In browser create pull request
|
||||
|
||||
### 7. Sync your local fork
|
||||
|
||||
[https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/syncing-a-fork](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/syncing-a-fork)
|
||||
|
||||
Bring changes from original repo to your local vscode copy so you're current with changes made in original Github repo
|
||||
|
||||

|
||||
|
||||
In VSCode open TERMINAL
|
||||
|
||||
```
|
||||
Ctrl+`
|
||||
```
|
||||
|
||||
Tell git to pull from the GitHub upstream repo all new changes into your local directory
|
||||
|
||||
```
|
||||
git pull --rebase upstream develop
|
||||
```
|
||||
#### 7a. Push your local updated copy to your Github fork
|
||||
|
||||
Then you're `push`ing that updated local repo to your online Github fork
|
||||
|
||||

|
||||
|
||||
### 8. Verify and Repeat
|
||||
|
||||
Check your Github fork in browser, should be up to date now with original. Repeat 6 or 7 as necessary
|
||||
|
||||
## Reference
|
||||
|
||||
### Customizing the Admin Web Interface
|
||||
|
||||
Created using quasar, it's all your .vue files in `web/src/components/modals/agents/RunScript.vue`
|
||||
|
||||
Learn stuff here
|
||||
|
||||
https://quasar.dev/
|
||||
|
||||
|
||||
9
docs/docs/functions/automated_tasks.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# Automated Tasks
|
||||
|
||||
## Collector Tasks
|
||||
|
||||
Collector tasks allow saving data from script output directly to a custom field. The collector task will only save the last line of standard output of the script.
|
||||
|
||||
You can create collector tasks by adding it to an Automation Policy or adding it directly to an agent. During creation, select the **Collector** checkbox and select the custom field to save to. You can only save to agent custom fields at this time.
|
||||
|
||||
See [Custom Fields](custom_fields.md) and [Scripting](scripting.md) for more information
|
||||
@@ -1,16 +1,53 @@
|
||||
# Custom Fields
|
||||
|
||||
**Settings > Global Settings > Custom Fields**
|
||||
!!!info
|
||||
v0.5.0 adds support for custom fields to be used in the dashboard and in scripts.
|
||||
|
||||
v0.5.0 adds support for custom fields to be used in scripts.
|
||||
#### Adding Custom Fields
|
||||
|
||||
It also exposes some pre-defined fields that are already in the database.
|
||||
In the dashboard, go to **Settings > Global Settings > Custom Fields** and click **Add Custom Field**.
|
||||
|
||||
Please check the following video for examples until proper docs are written:
|
||||
The following options are available to configure on custom fields:
|
||||
|
||||
[https://www.youtube.com/watch?v=0-5jGGL3FOM](https://www.youtube.com/watch?v=0-5jGGL3FOM)
|
||||
- **Model** - This is the object that the custom field will be added to. The available options are:
|
||||
- Agent
|
||||
- Site
|
||||
- Client
|
||||
- **Name** - Sets the name of the custom field. This will be used to identify the custom field in the dashboard and in scripts.
|
||||
- **Field Type** - Sets the type of field. Below are the allowed types.
|
||||
- Text
|
||||
- Number
|
||||
- Single select dropdown
|
||||
- Multi-select dropdown
|
||||
- Checkbox
|
||||
- DateTime
|
||||
- **Input Options** - *Only available on Single and Multiple-select dropdowns*. Sets the options to choose from.
|
||||
- **Default Value** - If no value is found when looking up the custom field; this value will instead be supplied.
|
||||
- **Required** - This makes the field required when adding new Clients, Sites, and Agents. *If this is set a default value will need to be set as well*
|
||||
- **Hide in Dashboard** - This will not show the custom field in Client, Site, and Agent forms in the dashboard. This is useful if the custom field's value is updated by a collector task and only supplied to scripts.
|
||||
|
||||
#### Using Custom Fields in the Dashboard
|
||||
|
||||
Once the custom fields are added, they will show up in the Client, Site, and Agent Add/Edit forms.
|
||||
|
||||
#### Using Custom Fields in Scripts
|
||||
|
||||
|
||||
Tactical RMM allows for passing various database fields for Clients, Sites, and Agents in scripts. This includes custom fields as well!
|
||||
|
||||
!!!warning
|
||||
The characters within the brackets is case-sensitive!
|
||||
|
||||
In your script's arguments, use the notation `{{client.AV_KEY}}`. This will lookup the client for the agent that the script is running on and find the custom field named `AV_KEY` and replace that with the value.
|
||||
|
||||
The same is also true for `{{site.no_patching}}` and `{{agent.Another Field}}`
|
||||
|
||||
For more information see SCRIPTING PAGE
|
||||
|
||||
#### Populating Custom Fields automatically
|
||||
|
||||
Tactical RMM supports automatically collecting information and saving them directly to custom fields. This is made possible by creating **Collector Tasks**. These are just normal Automated Tasks, but instead they will save the last line of the standard output to the custom field that is selected.
|
||||
|
||||
!!!info
|
||||
To populate a multiple select custom field, return a string with the options separated by a comma `"This,will,be,an,array"`
|
||||
|
||||
For more information See [Collector Tasks](automated_tasks.md#Collector Tasks)
|
||||
|
||||
9
docs/docs/functions/keystore.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# Global Key Store
|
||||
|
||||
The key store is used to store values that need to be referenced from multiple scripts. This also allows for easy updating of values since scripts reference the values at runtime.
|
||||
|
||||
To Add/Edit values in the Global Key Store, browse to **Settings > Global Settings > KeyStore**.
|
||||
|
||||
You can reference values from the key store in script arguments by using the {{global.key_name}} syntax.
|
||||
|
||||
See [Scripts](scripting.md) for more information.
|
||||
110
docs/docs/functions/scripting.md
Normal file
@@ -0,0 +1,110 @@
|
||||
# Scripting
|
||||
|
||||
Tactical RMM supports uploading existing scripts or adding new scripts right in the dashboard. Languages supported are:
|
||||
|
||||
- Powershell
|
||||
- Windows Batch
|
||||
- Python
|
||||
|
||||
## Adding Scripts
|
||||
In the dashboard, browse to **Settings > Scripts Manager**. Click the **New** button and select either Upload Script or New Script. The available options for scripts are:
|
||||
|
||||
- **Name** - This identifies the script in the dashboard
|
||||
- **Description** - Optional description for the script
|
||||
- **Category** - Optional way to group similar scripts together.
|
||||
- **Type** - This sets the language of the script. Available options are:
|
||||
- Powershell
|
||||
- Windows Batch
|
||||
- Python
|
||||
- **Script Arguments** - Optional way to set default arguments for scripts. These will autopopulate when running scripts and can be changed at runtime.
|
||||
- **Default Timeout** - Sets the default timeout of the script and will stop script execution if the duration surpasses the configured timeout. Can be changed at script runtime
|
||||
- **Favorite** - Favorites the script.
|
||||
|
||||
## Downloading Scripts
|
||||
|
||||
To download a Tactical RMM Script, click on the script in the Script Manager to select it. Then click the **Download Script** button on the top. You can also right-click on the script and select download
|
||||
|
||||
## Community Script
|
||||
|
||||
These are script that are built into Tactical RMM. They are provided and mantained by the Tactical RMM community. These scripts are updated whenever Tactical RMM is updated and can't be modified or deleted in the dashboard.
|
||||
|
||||
### Hiding Community Scripts
|
||||
You can choose to hide community script throughout the dashboard by opening **Script Manager** and clicking the **Show/Hide Community Scripts** toggle button.
|
||||
|
||||
## Using Scripts
|
||||
|
||||
### Manual run on agent
|
||||
|
||||
In the **Agent Table**, you can right-click on an agent and select **Run Script**. You have the options of:
|
||||
- **Wait for Output** - Runs the script and waits for the script to finish running and displays the output.
|
||||
- **Fire and Forget** - Starts the script and does not wait for output.
|
||||
- **Email Output** - Starts the script and will email the output. Allows for using the default email address in the global settings or adding a new email address.
|
||||
|
||||
There is also an option on the agent context menu called **Run Favorited Script**. This will essentially Fire and Forget the script with default args and timeout.
|
||||
|
||||
### Bulk Run on agents
|
||||
|
||||
Tactical RMM offers a way to run a script on multiple agents at once. Browse to **Tools > Bulk Script** and select the target for the script to run.
|
||||
|
||||
### Automated Tasks
|
||||
|
||||
Tactical RMM allows scheduling tasks to run on agents. This leverages the Windows Task Scheduler and has the same scheduling options.
|
||||
|
||||
See [Automated Tasks](automated_tasks.md) for configuring automated tasks
|
||||
|
||||
### Script Checks
|
||||
|
||||
Scripts can also be run periodically on an agent and trigger an alert if it fails.
|
||||
|
||||
### Alert Failure/Resolve Actions
|
||||
|
||||
Scripts can be triggered when an alert is triggered and resolved. This script will run on any online agent and supports passing the alert information as arguments.
|
||||
|
||||
For configuring **Alert Templates**, see [Alerting](../alerting.md)
|
||||
|
||||
See below for populating dashboard data in scripts and the available options.
|
||||
|
||||
## Using dashboard data in scripts
|
||||
|
||||
Tactical RMM allows passing in dashboard data to scripts as arguments. The below powershell arguments will get the client name of the agent and also the agent's public IP address
|
||||
|
||||
```
|
||||
-ClientName {{client.name}} -PublicIP {{agent.public_ip}}
|
||||
```
|
||||
|
||||
!!!info
|
||||
Script variables are case-sensitive!
|
||||
|
||||
See a full list of available options [Here](../script_variables.md)
|
||||
|
||||
### Getting Custom Field values
|
||||
|
||||
Tactical RMM supports pulling data from custom fields using the {{model.custom_field_name}} syntax.
|
||||
|
||||
See [Using Custom Fields in Scripts](custom_fields.md#Using Custom Fields in Scripts)
|
||||
|
||||
### Getting values from the Global Keystore
|
||||
|
||||
Tactical RMM supports getting values from the global key store using the {{global.key_name}} syntax
|
||||
|
||||
See [Global Keystore](keystore.md).
|
||||
|
||||
### Example Powershell Script
|
||||
|
||||
The below script takes five named values. The arguments will look like this: `-SiteName {{site.name}} -ClientName {{client.name}} -PublicIP {{agent.public_ip}} -CustomField {{client.AV_KEY}} -Global {{global.API_KEY}}`
|
||||
|
||||
```powershell
|
||||
param (
|
||||
[string] $SiteName,
|
||||
[string] $ClientName,
|
||||
[string] $PublicIp,
|
||||
[string] $CustomField,
|
||||
[string] $Global
|
||||
)
|
||||
|
||||
Write-Output "Site: $SiteName"
|
||||
Write-Output "Client: $ClientName"
|
||||
Write-Output "Public IP: $PublicIp"
|
||||
Write-Output "Custom Fields: $CustomField"
|
||||
Write-Output "Global: $Global"
|
||||
```
|
||||
BIN
docs/docs/images/celebrate.gif
Normal file
|
After Width: | Height: | Size: 375 KiB |
BIN
docs/docs/images/community_scripts_name_field_example1.png
Normal file
|
After Width: | Height: | Size: 17 KiB |
BIN
docs/docs/images/docker_WSL2_distros_missing.png
Normal file
|
After Width: | Height: | Size: 63 KiB |
BIN
docs/docs/images/docker_with_ubuntu-20.04.png
Normal file
|
After Width: | Height: | Size: 24 KiB |
BIN
docs/docs/images/installcomplete.png
Normal file
|
After Width: | Height: | Size: 16 KiB |
BIN
docs/docs/images/trmm_contribute-notice.png
Normal file
|
After Width: | Height: | Size: 22 KiB |
BIN
docs/docs/images/trmm_need_sync_local_fork.png
Normal file
|
After Width: | Height: | Size: 23 KiB |
BIN
docs/docs/images/trmm_vscode_git_pending.png
Normal file
|
After Width: | Height: | Size: 21 KiB |
BIN
docs/docs/images/vscode-forkit.png
Normal file
|
After Width: | Height: | Size: 9.9 KiB |
BIN
docs/docs/images/wls2_upgrade_and_set_default.png
Normal file
|
After Width: | Height: | Size: 29 KiB |
@@ -10,8 +10,6 @@ It uses an [agent](https://github.com/wh1te909/rmmagent) written in Golang and i
|
||||
|
||||
## [LIVE DEMO](https://rmm.tacticalrmm.io/)
|
||||
|
||||
*Tactical RMM is currently in alpha and subject to breaking changes. Use in production at your own risk.*
|
||||
|
||||
## Features
|
||||
|
||||
- Teamviewer-like remote desktop control
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
# Installing an agent
|
||||
|
||||
!!!warning
|
||||
If you don't want to deal with AV flagging/deleting your agents, check the instructions for getting [code signed agents](code_signing.md)<br/><br />
|
||||
You must add antivirus exclusions for the tactical agent.<br/>
|
||||
Any decent AV will flag the agent as a virus, since it technically is one due to the nature of this software.<br/>
|
||||
Adding the following exclusions will make sure everything works, including agent update:<br/>
|
||||
@@ -63,4 +64,9 @@ Will print very verbose logging during agent install. Useful for troubleshooting
|
||||
```
|
||||
-silent
|
||||
```
|
||||
This will not popup any message boxes during install, either any error messages or the "Installation was successful" message box that pops up at the end of a successful install.
|
||||
This will not popup any message boxes during install, either any error messages or the "Installation was successful" message box that pops up at the end of a successful install.
|
||||
|
||||
```
|
||||
-proxy "http://proxyserver:port"
|
||||
```
|
||||
Use a http proxy
|
||||
@@ -7,7 +7,8 @@
|
||||
The provided install script assumes a fresh server with no software installed on it. Attempting to run it on an existing server with other services **will** break things and the install will fail.<br/><br/>
|
||||
The install script has been tested on the following public cloud providers: DigitalOcean, Linode, Vultr, BuyVM (highly recommended), Hetzner, AWS, Google Cloud and Azure, as well as behind NAT on Hyper-V, Proxmox and ESXi.
|
||||
|
||||
- A real domain is needed to generate a Let's Encrypt wildcard cert. <br/>If you cannot afford to purchase a domain ($12 a year) then you can get one for free at [freenom.com](https://www.freenom.com/)<br/><br/>
|
||||
- A real (internet resolvable) domain is needed to generate a Let's Encrypt wildcard cert. <br/>If you cannot afford to purchase a domain ($12 a year) then you can get one for free at [freenom.com](https://www.freenom.com/)
|
||||
- example.local is __NOT__ a real domain. No you [don't have to expose your server](faq.md#can-i-run-tactical-rmm-locally-behind-nat-without-exposing-anything-to-the-internet) to the internet<br/><br/>
|
||||
|
||||
- A TOTP based authenticator app. Some popular ones are Google Authenticator, Authy and Microsoft Authenticator.<br/><br/>
|
||||
|
||||
@@ -128,3 +129,6 @@ Copy the url for the meshagent exe (`https://mesh.example.com/agentinvite?c=....
|
||||
Navigate to `https://rmm.example.com` and login with the username/password you created during install.<br/><br/>
|
||||
Once logged in, you will be redirected to the initial setup page.<br/><br/>
|
||||
Create your first client/site, choose the default timezone and then upload the mesh agent you just downloaded.
|
||||
|
||||
!!!note
|
||||
Though it is an unsupported configuration, if you are using HAProxy this might be of use to you [HAProxy Configuration Notes](unsupported_HAProxy.md)
|
||||
@@ -48,18 +48,27 @@ Change offline time on all agents to 5 minutes
|
||||
python manage.py bulk_change_checkin --offline --all 5
|
||||
```
|
||||
|
||||
Change overdue time on all agents to 10 minutes
|
||||
```bash
|
||||
python manage.py bulk_change_checkin --overdue --all 10
|
||||
```
|
||||
|
||||
Change overdue time on all agents in client named *Example Client* to 12 minutes
|
||||
```bash
|
||||
python manage.py bulk_change_checkin --overdue --client "Example Client" 12
|
||||
```
|
||||
|
||||
Change offline time on all agents in site named *Example Site* to 2 minutes
|
||||
```bash
|
||||
python manage.py bulk_change_checkin --offline --site "Example Site" 2
|
||||
```
|
||||
|
||||
Change offline time on all agents in client named *Example Client* to 12 minutes
|
||||
```bash
|
||||
python manage.py bulk_change_checkin --offline --client "Example Client" 12
|
||||
```
|
||||
|
||||
Change overdue time on all agents to 10 minutes
|
||||
```bash
|
||||
python manage.py bulk_change_checkin --overdue --all 10
|
||||
```
|
||||
|
||||
Change overdue time on all agents in site named *Example Site* to 4 minutes
|
||||
```bash
|
||||
python manage.py bulk_change_checkin --overdue --site "Example Site" 4
|
||||
```
|
||||
|
||||
Change overdue time on all agents in client named *Example Client* to 14 minutes
|
||||
```bash
|
||||
python manage.py bulk_change_checkin --overdue --client "Example Client" 14
|
||||
```
|
||||
|
||||
@@ -4,7 +4,9 @@
|
||||
It is currently not possible to restore to a different domain/subdomain, only to a different physical or virtual server.
|
||||
|
||||
!!!danger
|
||||
You must update your old RMM to the latest version using the `update.sh` script before attempting to restore.
|
||||
The restore script will always restore to the latest available RMM version on github.
|
||||
|
||||
Make sure you update your old RMM to the latest version using the `update.sh` script and then run a fresh backup to use with this restore script.
|
||||
#### Prepare the new server
|
||||
Create the same exact linux user account as you did when you installed the original server.
|
||||
|
||||
|
||||
39
docs/docs/script_variables.md
Normal file
@@ -0,0 +1,39 @@
|
||||
# Script Variables
|
||||
|
||||
Tactical RMM allows passing dashboard data into scripts as arguments. This uses the syntax `{{client.name}}`.
|
||||
|
||||
See below for the available options.
|
||||
|
||||
## Agent
|
||||
|
||||
- **{{agent.version}}** - Tactical RMM agent version
|
||||
- **{{agent.operating_system}}** - Agent operating system example: *Windows 10 Pro, 64 bit (build 19042.928)*
|
||||
- **{{agent.plat}}** - Will show the platform example: *windows*
|
||||
- **{{agent.hostname}}** - The hostname of the agent
|
||||
- **{{agent.public_ip}}** - Public IP address of agent
|
||||
- **{{agent.total_ram}}** - Total RAM on agent. Returns an integer - example: *16*
|
||||
- **{{agent.boot_time}}** - Uptime of agent. Returns unix timestamp. example: *1619439603.0*
|
||||
- **{{agent.logged_in_user}}** - Username of logged in user
|
||||
- **{{agent.monitoring_type}}** - Returns a string of *workstation* or *server*
|
||||
- **{{agent.description}}** - Description of agent in dashboard
|
||||
- **{{agent.mesh_node_id}}** - The mesh node id used for linking the tactical agent to mesh.
|
||||
- **{{agent.choco_installed}}** - Boolean to see if Chocolatey is installed
|
||||
- **{{agent.patches_last_installed}}** - The date that patches were last installed by Tactical RMM.
|
||||
- **{{agent.needs_reboot}}** - Returns true if the agent needs a reboot
|
||||
- **{{agent.time_zone}}** - Returns timezone configured on agent
|
||||
- **{{agent.maintenance_mode}}** - Returns true if agent is in maintenance mode
|
||||
|
||||
## Client
|
||||
- **{{client.name}}** - Returns name of client
|
||||
|
||||
## Site
|
||||
- **{{site.name}}** - Returns name of Site
|
||||
|
||||
## Alert
|
||||
|
||||
!!!info
|
||||
Only available in failure and resolve actions on alert templates!
|
||||
|
||||
- **{{alert.alert_time}}** - Time of the alert
|
||||
- **{{alert.message}}** - Alert message
|
||||
- **{{alert.severity}}** - Severity of the alert *info, warning, or error*
|
||||
@@ -30,6 +30,9 @@ If you have agents that are relatively old, you will need to uninstall them manu
|
||||
|
||||
#### Agents not checking in or showing up / General agent issues
|
||||
|
||||
First, reload NATS from tactical's web UI:<br />
|
||||
*Tools > Server Maintenance > Reload Nats Configuration*
|
||||
|
||||
Open CMD as admin on the problem computer and stop the agent services:
|
||||
|
||||
```cmd
|
||||
@@ -51,9 +54,9 @@ This will print out a ton of info. You should be able to see the error from the
|
||||
|
||||
Please then copy/paste the logs and post them either in our [Discord support chat](https://discord.gg/upGTkWp), or create a [github issue](https://github.com/wh1te909/tacticalrmm/issues).
|
||||
|
||||
<br/>
|
||||
If all else fails, simply uninstall the agent either from control panel or silently with `"C:\Program Files\TacticalAgent\unins000.exe" /VERYSILENT` and then reinstall the agent.
|
||||
|
||||
#### Web UI frozen or not loading / website errors / general errors
|
||||
#### All other errors
|
||||
|
||||
First, run the [update script](update_server.md#updating-to-the-latest-rmm-version) with the `--force` flag. <br/>This will fix permissions and reinstall python/node packages that might have gotten corrupted.
|
||||
|
||||
@@ -61,10 +64,15 @@ First, run the [update script](update_server.md#updating-to-the-latest-rmm-versi
|
||||
./update.sh --force
|
||||
```
|
||||
|
||||
Check the debug log from the web UI: **File > Debug Log**
|
||||
|
||||
Open your browser's dev tools (ctrl + shift + j on chrome) and check the Console tab for any errors
|
||||
|
||||
Check all the systemd services that the rmm uses to function and check to make sure they're all active/running and enabled:
|
||||
|
||||
```bash
|
||||
sudo systemctl status rmm
|
||||
sudo systemctl status daphne
|
||||
sudo systemctl status celery
|
||||
sudo systemctl status celerybeat
|
||||
sudo systemctl status nginx
|
||||
|
||||
119
docs/docs/unsupported_HAProxy.md
Normal file
@@ -0,0 +1,119 @@
|
||||
# HAProxy
|
||||
|
||||
!!!note
|
||||
This is not a supported configuration by Tactical RMM, but it's provided here for your reference.
|
||||
|
||||
Check/Change the mesh central config.json, some of the values may be set already, CertUrl must be changed to point to the HAProxy server.
|
||||
|
||||
## Meshcentral Adjustment
|
||||
|
||||
Credit to [@bradhawkins](https://github.com/bradhawkins85)
|
||||
|
||||
Edit Meshcentral config
|
||||
|
||||
```bash
|
||||
nano /meshcentral/meshcentral-data/config.json
|
||||
```
|
||||
|
||||
Insert this (modify `HAProxyIP` to your network)
|
||||
|
||||
```
|
||||
{
|
||||
"settings": {
|
||||
"Port": 4430,
|
||||
"AliasPort": 443,
|
||||
"RedirPort": 800,
|
||||
"TlsOffload": "127.0.0.1",
|
||||
},
|
||||
"domains": {
|
||||
"": {
|
||||
"CertUrl": "https://HAProxyIP:443/",
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Restart meshcentral
|
||||
|
||||
```bash
|
||||
service meshcentral restart
|
||||
```
|
||||
|
||||
## HAProxy Config
|
||||
|
||||
The order of use_backend is important `Tactical-Mesh-WebSocket_ipvANY` must be before `Tactical-Mesh_ipvANY`
|
||||
The values of `timeout connect`, `timeout server`, `timeout tunnel` in `Tactical-Mesh-WebSocket` have been configured to maintain a stable agent connection, however you may need to adjust these values to suit your environment.
|
||||
|
||||
```
|
||||
frontend HTTPS-merged
|
||||
bind 0.0.0.0:443 name 0.0.0.0:443 ssl crt-list /var/etc/haproxy/HTTPS.crt_list #ADJUST THIS TO YOUR OWN SSL CERTIFICATES
|
||||
mode http
|
||||
log global
|
||||
option socket-stats
|
||||
option dontlognull
|
||||
option http-server-close
|
||||
option forwardfor
|
||||
acl https ssl_fc
|
||||
http-request set-header X-Forwarded-Proto http if !https
|
||||
http-request set-header X-Forwarded-Proto https if https
|
||||
timeout client 30000
|
||||
acl RMM var(txn.txnhost) -m sub -i rmm.example.com
|
||||
acl aclcrt_RMM var(txn.txnhost) -m reg -i ^([^\.]*)\.example\.com(:([0-9]){1,5})?$
|
||||
acl API var(txn.txnhost) -m sub -i api.example.com
|
||||
acl aclcrt_API var(txn.txnhost) -m reg -i ^([^\.]*)\.example\.com(:([0-9]){1,5})?$
|
||||
acl is_websocket hdr(Upgrade) -i WebSocket
|
||||
acl is_mesh var(txn.txnhost) -m beg -i mesh.example.com
|
||||
acl aclcrt_MESH-WebSocket var(txn.txnhost) -m reg -i ^([^\.]*)\.example\.com(:([0-9]){1,5})?$
|
||||
acl MESH var(txn.txnhost) -m sub -i mesh.example.com
|
||||
acl aclcrt_MESH var(txn.txnhost) -m reg -i ^([^\.]*)\.example\.com(:([0-9]){1,5})?$
|
||||
#PUT OTHER USE_BACKEND IN HERE
|
||||
use_backend Tactical_ipvANY if RMM aclcrt_RMM
|
||||
use_backend Tactical_ipvANY if API aclcrt_API
|
||||
use_backend Tactical-Mesh-WebSocket_ipvANY if is_websocket is_mesh aclcrt_MESH-WebSocket
|
||||
use_backend Tactical-Mesh_ipvANY if MESH aclcrt_MESH
|
||||
|
||||
frontend http-to-https
|
||||
bind 0.0.0.0:80
|
||||
mode http
|
||||
log global
|
||||
option http-keep-alive
|
||||
timeout client 30000
|
||||
http-request redirect scheme https
|
||||
|
||||
|
||||
backend Tactical_ipvANY
|
||||
mode http
|
||||
id 100
|
||||
log global
|
||||
timeout connect 30000
|
||||
timeout server 30000
|
||||
retries 3
|
||||
option httpchk GET /
|
||||
server tactical 192.168.10.123:443 id 101 ssl check inter 1000 verify none
|
||||
|
||||
|
||||
backend Tactical-Mesh-WebSocket_ipvANY
|
||||
mode http
|
||||
id 113
|
||||
log global
|
||||
timeout connect 3000
|
||||
timeout server 3000
|
||||
retries 3
|
||||
timeout tunnel 3600000
|
||||
http-request add-header X-Forwarded-Host %[req.hdr(Host)]
|
||||
http-request add-header X-Forwarded-Proto https
|
||||
server tactical 192.168.10.123:443 id 101 ssl verify none
|
||||
|
||||
backend Tactical-Mesh_ipvANY
|
||||
mode http
|
||||
id 112
|
||||
log global
|
||||
timeout connect 15000
|
||||
timeout server 15000
|
||||
retries 3
|
||||
option httpchk GET /
|
||||
timeout tunnel 15000
|
||||
http-request add-header X-Forwarded-Host %[req.hdr(Host)]
|
||||
http-request add-header X-Forwarded-Proto https
|
||||
server tactical 192.168.10.123:443 id 101 ssl check inter 1000 verify none
|
||||
```
|
||||
@@ -2,6 +2,7 @@ site_name: "Tactical RMM Documentation"
|
||||
nav:
|
||||
- Home: index.md
|
||||
- Sponsor: sponsor.md
|
||||
- Code Signing: code_signing.md
|
||||
- RMM Installation:
|
||||
- "Traditional Install": install_server.md
|
||||
- "Docker Install": install_docker.md
|
||||
@@ -11,6 +12,9 @@ nav:
|
||||
- "Updating the RMM (Docker)": update_docker.md
|
||||
- "Updating Agents": update_agents.md
|
||||
- Functionality:
|
||||
- "Automated Tasks": functions/automated_tasks.md
|
||||
- "Scripting": functions/scripting.md
|
||||
- "Global Keystore": functions/keystore.md
|
||||
- "Custom Fields": functions/custom_fields.md
|
||||
- "Remote Background": functions/remote_bg.md
|
||||
- "Maintenance Mode": functions/maintenance_mode.md
|
||||
@@ -23,7 +27,10 @@ nav:
|
||||
- FAQ: faq.md
|
||||
- Management Commands: management_cmds.md
|
||||
- MeshCentral Integration: mesh_integration.md
|
||||
- Contributing: contributing.md
|
||||
- Contributing:
|
||||
- "Contributing to Docs": contributing.md
|
||||
- "Contributing using VSCode": contributing_using_vscode.md
|
||||
- "Contributing to Community Scripts": contributing_community_scripts.md
|
||||
- License: license.md
|
||||
site_description: "A remote monitoring and management tool"
|
||||
site_author: "wh1te909"
|
||||
|
||||
13
install.sh
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
SCRIPT_VERSION="45"
|
||||
SCRIPT_VERSION="47"
|
||||
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/install.sh'
|
||||
|
||||
sudo apt install -y curl wget dirmngr gnupg lsb-release
|
||||
@@ -167,11 +167,11 @@ sudo chmod 775 -R /etc/letsencrypt
|
||||
print_green 'Downloading NATS'
|
||||
|
||||
nats_tmp=$(mktemp -d -t nats-XXXXXXXXXX)
|
||||
wget https://github.com/nats-io/nats-server/releases/download/v2.2.0/nats-server-v2.2.0-linux-amd64.tar.gz -P ${nats_tmp}
|
||||
wget https://github.com/nats-io/nats-server/releases/download/v2.2.2/nats-server-v2.2.2-linux-amd64.tar.gz -P ${nats_tmp}
|
||||
|
||||
tar -xzf ${nats_tmp}/nats-server-v2.2.0-linux-amd64.tar.gz -C ${nats_tmp}
|
||||
tar -xzf ${nats_tmp}/nats-server-v2.2.2-linux-amd64.tar.gz -C ${nats_tmp}
|
||||
|
||||
sudo mv ${nats_tmp}/nats-server-v2.2.0-linux-amd64/nats-server /usr/local/bin/
|
||||
sudo mv ${nats_tmp}/nats-server-v2.2.2-linux-amd64/nats-server /usr/local/bin/
|
||||
sudo chmod +x /usr/local/bin/nats-server
|
||||
sudo chown ${USER}:${USER} /usr/local/bin/nats-server
|
||||
rm -rf ${nats_tmp}
|
||||
@@ -354,12 +354,15 @@ sudo chmod +x /usr/local/bin/nats-api
|
||||
|
||||
print_green 'Installing the backend'
|
||||
|
||||
SETUPTOOLS_VER=$(grep "^SETUPTOOLS_VER" /rmm/api/tacticalrmm/tacticalrmm/settings.py | awk -F'[= "]' '{print $5}')
|
||||
WHEEL_VER=$(grep "^WHEEL_VER" /rmm/api/tacticalrmm/tacticalrmm/settings.py | awk -F'[= "]' '{print $5}')
|
||||
|
||||
cd /rmm/api
|
||||
python3.9 -m venv env
|
||||
source /rmm/api/env/bin/activate
|
||||
cd /rmm/api/tacticalrmm
|
||||
pip install --no-cache-dir --upgrade pip
|
||||
pip install --no-cache-dir setuptools==53.0.0 wheel==0.36.2
|
||||
pip install --no-cache-dir setuptools==${SETUPTOOLS_VER} wheel==${WHEEL_VER}
|
||||
pip install --no-cache-dir -r /rmm/api/tacticalrmm/requirements.txt
|
||||
python manage.py migrate
|
||||
python manage.py collectstatic --no-input
|
||||
|
||||
14
restore.sh
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
SCRIPT_VERSION="23"
|
||||
SCRIPT_VERSION="25"
|
||||
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/restore.sh'
|
||||
|
||||
sudo apt update
|
||||
@@ -108,11 +108,11 @@ sudo apt update
|
||||
print_green 'Downloading NATS'
|
||||
|
||||
nats_tmp=$(mktemp -d -t nats-XXXXXXXXXX)
|
||||
wget https://github.com/nats-io/nats-server/releases/download/v2.2.0/nats-server-v2.2.0-linux-amd64.tar.gz -P ${nats_tmp}
|
||||
wget https://github.com/nats-io/nats-server/releases/download/v2.2.2/nats-server-v2.2.2-linux-amd64.tar.gz -P ${nats_tmp}
|
||||
|
||||
tar -xzf ${nats_tmp}/nats-server-v2.2.0-linux-amd64.tar.gz -C ${nats_tmp}
|
||||
tar -xzf ${nats_tmp}/nats-server-v2.2.2-linux-amd64.tar.gz -C ${nats_tmp}
|
||||
|
||||
sudo mv ${nats_tmp}/nats-server-v2.2.0-linux-amd64/nats-server /usr/local/bin/
|
||||
sudo mv ${nats_tmp}/nats-server-v2.2.2-linux-amd64/nats-server /usr/local/bin/
|
||||
sudo chmod +x /usr/local/bin/nats-server
|
||||
sudo chown ${USER}:${USER} /usr/local/bin/nats-server
|
||||
rm -rf ${nats_tmp}
|
||||
@@ -286,13 +286,17 @@ sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE tacticalrmm TO ${pgus
|
||||
gzip -d $tmp_dir/postgres/*.psql.gz
|
||||
PGPASSWORD=${pgpw} psql -h localhost -U ${pgusername} -d tacticalrmm -f $tmp_dir/postgres/db*.psql
|
||||
|
||||
SETUPTOOLS_VER=$(grep "^SETUPTOOLS_VER" /rmm/api/tacticalrmm/tacticalrmm/settings.py | awk -F'[= "]' '{print $5}')
|
||||
WHEEL_VER=$(grep "^WHEEL_VER" /rmm/api/tacticalrmm/tacticalrmm/settings.py | awk -F'[= "]' '{print $5}')
|
||||
|
||||
cd /rmm/api
|
||||
python3.9 -m venv env
|
||||
source /rmm/api/env/bin/activate
|
||||
cd /rmm/api/tacticalrmm
|
||||
pip install --no-cache-dir --upgrade pip
|
||||
pip install --no-cache-dir setuptools==53.0.0 wheel==0.36.2
|
||||
pip install --no-cache-dir setuptools==${SETUPTOOLS_VER} wheel==${WHEEL_VER}
|
||||
pip install --no-cache-dir -r /rmm/api/tacticalrmm/requirements.txt
|
||||
python manage.py migrate
|
||||
python manage.py collectstatic --no-input
|
||||
python manage.py reload_nats
|
||||
deactivate
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
# Checks local disks for errors reported in event viewer within the last 24 hours
|
||||
|
||||
$ErrorActionPreference= 'silentlycontinue'
|
||||
$TimeSpan = (Get-Date) - (New-TimeSpan -Day 1)
|
||||
if (Get-WinEvent -FilterHashtable @{LogName='system';ID='11','9','15','52','129','7','98';Level=2,3;ProviderName='*disk*','*storsvc*','*ntfs*';StartTime=$TimeSpan})
|
||||
|
||||
{
|
||||
Write-Output "Disk errors detected please investigate"
|
||||
Get-WinEvent -FilterHashtable @{LogName='system';ID='11','9','15','52','129','7','98';Level=2,3;ProviderName='*disk*','*storsvc*','*ntfs*';StartTime=$TimeSpan}
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
else
|
||||
{
|
||||
Write-Output "Disks are Healthy"
|
||||
exit 0
|
||||
}
|
||||
|
||||
|
||||
Exit $LASTEXITCODE
|
||||
@@ -1 +0,0 @@
|
||||
Restart-Computer -ComputerName $env:COMPUTERNAME -Force
|
||||
@@ -1 +0,0 @@
|
||||
Stop-Computer -ComputerName $env:COMPUTERNAME -Force
|
||||
@@ -37,7 +37,7 @@ except ImportError:
|
||||
gzip = None
|
||||
GZIP_BASE = object
|
||||
|
||||
__version__ = "2.1.2"
|
||||
__version__ = "2.1.3"
|
||||
|
||||
|
||||
class FakeShutdownEvent(object):
|
||||
@@ -728,7 +728,9 @@ def build_request(url, data=None, headers=None, bump="0", secure=False):
|
||||
)
|
||||
|
||||
headers.update(
|
||||
{"Cache-Control": "no-cache",}
|
||||
{
|
||||
"Cache-Control": "no-cache",
|
||||
}
|
||||
)
|
||||
|
||||
printer("%s %s" % (("GET", "POST")[bool(data)], final_url), debug=True)
|
||||
@@ -840,6 +842,8 @@ class HTTPDownloader(threading.Thread):
|
||||
f.close()
|
||||
except IOError:
|
||||
pass
|
||||
except HTTP_ERRORS:
|
||||
pass
|
||||
|
||||
|
||||
class HTTPUploaderData(object):
|
||||
@@ -907,7 +911,7 @@ class HTTPUploader(threading.Thread):
|
||||
self.request = request
|
||||
self.request.data.start = self.starttime = start
|
||||
self.size = size
|
||||
self.result = None
|
||||
self.result = 0
|
||||
self.timeout = timeout
|
||||
self.i = i
|
||||
|
||||
@@ -944,6 +948,8 @@ class HTTPUploader(threading.Thread):
|
||||
self.result = 0
|
||||
except (IOError, SpeedtestUploadTimeout):
|
||||
self.result = sum(self.request.data.total)
|
||||
except HTTP_ERRORS:
|
||||
self.result = 0
|
||||
|
||||
|
||||
class SpeedtestResults(object):
|
||||
@@ -1159,7 +1165,9 @@ class Speedtest(object):
|
||||
self._best = {}
|
||||
|
||||
self.results = SpeedtestResults(
|
||||
client=self.config["client"], opener=self._opener, secure=secure,
|
||||
client=self.config["client"],
|
||||
opener=self._opener,
|
||||
secure=secure,
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -1233,7 +1241,7 @@ class Speedtest(object):
|
||||
# times = get_attributes_by_tag_name(root, 'times')
|
||||
client = get_attributes_by_tag_name(root, "client")
|
||||
|
||||
ignore_servers = list(map(int, server_config["ignoreids"].split(",")))
|
||||
ignore_servers = [int(i) for i in server_config["ignoreids"].split(",") if i]
|
||||
|
||||
ratio = int(upload["ratio"])
|
||||
upload_max = int(upload["maxchunkcount"])
|
||||
@@ -1,24 +0,0 @@
|
||||
# This will check for Malware, Antispyware, that Windows Defender is Healthy, last scan etc within the last 24 hours
|
||||
|
||||
$ErrorActionPreference= 'silentlycontinue'
|
||||
$TimeSpan = (Get-Date) - (New-TimeSpan -Day 1)
|
||||
|
||||
if (Get-WinEvent -FilterHashtable @{LogName='Microsoft-Windows-Windows Defender/Operational';ID='1116','1118','1015','1006','5010','5012','5001','1123';StartTime=$TimeSpan})
|
||||
|
||||
{
|
||||
Write-Output "Virus Found or Issue with Defender"
|
||||
Get-WinEvent -FilterHashtable @{LogName='Microsoft-Windows-Windows Defender/Operational';ID='1116','1118','1015','1006','5010','5012','5001','1123';StartTime=$TimeSpan}
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
else
|
||||
|
||||
{
|
||||
Write-Output "No Virus Found, Defender is Healthy"
|
||||
Get-WinEvent -FilterHashtable @{LogName='Microsoft-Windows-Windows Defender/Operational';ID='1150','1001';StartTime=$TimeSpan}
|
||||
exit 0
|
||||
}
|
||||
|
||||
|
||||
Exit $LASTEXITCODE
|
||||