Compare commits
214 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
aa8b84a302 | ||
|
|
b987d041b0 | ||
|
|
b62e37307e | ||
|
|
61a59aa6ac | ||
|
|
f79ec27f1d | ||
|
|
b993fe380f | ||
|
|
d974b5f55f | ||
|
|
f21ae93197 | ||
|
|
342ff18be8 | ||
|
|
a8236f69bf | ||
|
|
ab15a2448d | ||
|
|
6ff4d8f558 | ||
|
|
bb04ba528c | ||
|
|
b94a795189 | ||
|
|
9968184733 | ||
|
|
1be6f8f87a | ||
|
|
426821cceb | ||
|
|
4fec0deaf7 | ||
|
|
144ac5b6ce | ||
|
|
97c73786fa | ||
|
|
82e59d7da0 | ||
|
|
b2c10de6af | ||
|
|
d72029c2c6 | ||
|
|
17b9987063 | ||
|
|
fde07da2b7 | ||
|
|
c23bc29511 | ||
|
|
714cad2a52 | ||
|
|
357d5d2fde | ||
|
|
d477cce901 | ||
|
|
eb6af52ad1 | ||
|
|
aae75023a7 | ||
|
|
41dcd4f458 | ||
|
|
4651ae4495 | ||
|
|
ed61e0b0fc | ||
|
|
1eefc6fbf4 | ||
|
|
09ebf2cea2 | ||
|
|
b3b0c4cd65 | ||
|
|
f4b7924e8f | ||
|
|
ea68d38b82 | ||
|
|
dfbaa71132 | ||
|
|
6c328deb08 | ||
|
|
add564d5bf | ||
|
|
fa94acb426 | ||
|
|
6827468f13 | ||
|
|
53fd43868f | ||
|
|
9ced7561c5 | ||
|
|
31d55d3425 | ||
|
|
171d2a5bb9 | ||
|
|
c5d05c1205 | ||
|
|
2973e0559a | ||
|
|
ec27288dcf | ||
|
|
f92e5c7093 | ||
|
|
7c67155c49 | ||
|
|
b102cd4652 | ||
|
|
67f9a48c37 | ||
|
|
a0c8a1ee65 | ||
|
|
7e7d272b06 | ||
|
|
3c642240ae | ||
|
|
b5157fcaf1 | ||
|
|
d1cb42f1bc | ||
|
|
84cde1a16a | ||
|
|
877f5db1ce | ||
|
|
787164e245 | ||
|
|
d77fc5e7c5 | ||
|
|
cca39a67d6 | ||
|
|
a6c9a0431a | ||
|
|
729a80a639 | ||
|
|
31cb3001f6 | ||
|
|
5d0f54a329 | ||
|
|
c8c3f5b5b7 | ||
|
|
ba473ed75a | ||
|
|
7236fd59f8 | ||
|
|
9471e8f1fd | ||
|
|
a2d39b51bb | ||
|
|
2920934b55 | ||
|
|
3f709d448e | ||
|
|
b79f66183f | ||
|
|
8672f57e55 | ||
|
|
1e99c82351 | ||
|
|
1a2ff851f3 | ||
|
|
f1c27c3959 | ||
|
|
b30dac0f15 | ||
|
|
cc79e5cdaf | ||
|
|
d9a3b2f2cb | ||
|
|
479b528d09 | ||
|
|
461fb84fb9 | ||
|
|
bd7685e3fa | ||
|
|
cd98cb64b3 | ||
|
|
0f32a3ec24 | ||
|
|
ca446cac87 | ||
|
|
6ea907ffda | ||
|
|
5287baa70d | ||
|
|
25935fec84 | ||
|
|
e855a063ff | ||
|
|
c726b8c9f0 | ||
|
|
13cb99290e | ||
|
|
cea9413fd1 | ||
|
|
1432853b39 | ||
|
|
6d6c2b86e8 | ||
|
|
77b1d964b5 | ||
|
|
549936fc09 | ||
|
|
c9c32f09c5 | ||
|
|
77f7778d4a | ||
|
|
84b6be9364 | ||
|
|
1e43b55804 | ||
|
|
ba9bdaae0a | ||
|
|
7dfd7bde8e | ||
|
|
5e6c4161d0 | ||
|
|
d75d56dfc9 | ||
|
|
1d9d350091 | ||
|
|
5744053c6f | ||
|
|
65589b6ca2 | ||
|
|
e03a9d1137 | ||
|
|
29f80f2276 | ||
|
|
a9b74aa69b | ||
|
|
63ebfd3210 | ||
|
|
87fa5ff7a6 | ||
|
|
b686b53a9c | ||
|
|
258261dc64 | ||
|
|
9af5c9ead9 | ||
|
|
382654188c | ||
|
|
fa1df082b7 | ||
|
|
5c227d8f80 | ||
|
|
81dabdbfb7 | ||
|
|
91f89f5a33 | ||
|
|
9f92746aa0 | ||
|
|
5d6e6f9441 | ||
|
|
01395a2726 | ||
|
|
465d75c65d | ||
|
|
4634f8927e | ||
|
|
74a287f9fe | ||
|
|
7ff6c79835 | ||
|
|
3629982237 | ||
|
|
ddb610f1bc | ||
|
|
f899905d27 | ||
|
|
3e4531b5c5 | ||
|
|
a9e189e51d | ||
|
|
58ba08a8f3 | ||
|
|
9078ff27d8 | ||
|
|
6f43e61c24 | ||
|
|
4be0d3f212 | ||
|
|
00e47e5a27 | ||
|
|
152e145b32 | ||
|
|
54e55e8f57 | ||
|
|
05b8707f9e | ||
|
|
543e952023 | ||
|
|
6e5f40ea06 | ||
|
|
bbafb0be87 | ||
|
|
1c9c5232fe | ||
|
|
598d79a502 | ||
|
|
37d8360b77 | ||
|
|
82d9ca3317 | ||
|
|
4e4238d486 | ||
|
|
c77dbe44dc | ||
|
|
e03737f15f | ||
|
|
a02629bcd7 | ||
|
|
6c3fc23d78 | ||
|
|
0fe40f9ccb | ||
|
|
9bd7c8edd1 | ||
|
|
83ba480863 | ||
|
|
f158ea25e9 | ||
|
|
0227519eab | ||
|
|
616a9685fa | ||
|
|
fe61b01320 | ||
|
|
7b25144311 | ||
|
|
9d42fbbdd7 | ||
|
|
39ac5b088b | ||
|
|
c14ffd08a0 | ||
|
|
6e1239340b | ||
|
|
a297dc8b3b | ||
|
|
8d4ecc0898 | ||
|
|
eae9c04429 | ||
|
|
a41c48a9c5 | ||
|
|
ff2a94bd9b | ||
|
|
4a1f5558b8 | ||
|
|
608db9889f | ||
|
|
012b697337 | ||
|
|
0580506cf3 | ||
|
|
ff4ab9b661 | ||
|
|
b7ce5fdd3e | ||
|
|
a11e617322 | ||
|
|
d0beac7e2b | ||
|
|
9db497092f | ||
|
|
8eb91c08aa | ||
|
|
ded5437522 | ||
|
|
9348657951 | ||
|
|
bca85933f7 | ||
|
|
c32bb35f1c | ||
|
|
4b84062d62 | ||
|
|
d6d0f8fa17 | ||
|
|
dd72c875d3 | ||
|
|
1a1df50300 | ||
|
|
53cbb527b4 | ||
|
|
8b87b2717e | ||
|
|
1007d6dac7 | ||
|
|
6799fac120 | ||
|
|
558e6288ca | ||
|
|
d9cb73291b | ||
|
|
d0f7be3ac3 | ||
|
|
331e16d3ca | ||
|
|
0db246c311 | ||
|
|
94dc62ff58 | ||
|
|
e68ecf6844 | ||
|
|
5167b0a8c6 | ||
|
|
77e3d3786d | ||
|
|
708d4d39bc | ||
|
|
2a8cda2a1e | ||
|
|
8d783840ad | ||
|
|
abe39d5790 | ||
|
|
d7868e9e5a | ||
|
|
7b84e36e15 | ||
|
|
6cab6d69d8 | ||
|
|
87846d7aef | ||
|
|
2557769c6a |
@@ -1,7 +1,6 @@
|
||||
FROM python:3.9.2-slim
|
||||
|
||||
ENV TACTICAL_DIR /opt/tactical
|
||||
ENV TACTICAL_GO_DIR /usr/local/rmmgo
|
||||
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
|
||||
ENV WORKSPACE_DIR /workspace
|
||||
ENV TACTICAL_USER tactical
|
||||
@@ -9,14 +8,11 @@ ENV VIRTUAL_ENV ${WORKSPACE_DIR}/api/tacticalrmm/env
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
|
||||
EXPOSE 8000
|
||||
EXPOSE 8000 8383
|
||||
|
||||
RUN groupadd -g 1000 tactical && \
|
||||
useradd -u 1000 -g 1000 tactical
|
||||
|
||||
# Copy Go Files
|
||||
COPY --from=golang:1.16 /usr/local/go ${TACTICAL_GO_DIR}/go
|
||||
|
||||
# Copy Dev python reqs
|
||||
COPY ./requirements.txt /
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ version: '3.4'
|
||||
|
||||
services:
|
||||
api-dev:
|
||||
container_name: trmm-api-dev
|
||||
image: api-dev
|
||||
restart: always
|
||||
build:
|
||||
@@ -21,9 +22,10 @@ services:
|
||||
- tactical-backend
|
||||
|
||||
app-dev:
|
||||
image: node:12-alpine
|
||||
container_name: trmm-app-dev
|
||||
image: node:14-alpine
|
||||
restart: always
|
||||
command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
|
||||
command: /bin/sh -c "npm install npm@latest -g && npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
|
||||
working_dir: /workspace/web
|
||||
volumes:
|
||||
- ..:/workspace:cached
|
||||
@@ -36,6 +38,7 @@ services:
|
||||
|
||||
# nats
|
||||
nats-dev:
|
||||
container_name: trmm-nats-dev
|
||||
image: ${IMAGE_REPO}tactical-nats:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
@@ -55,6 +58,7 @@ services:
|
||||
|
||||
# meshcentral container
|
||||
meshcentral-dev:
|
||||
container_name: trmm-meshcentral-dev
|
||||
image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
@@ -77,6 +81,7 @@ services:
|
||||
|
||||
# mongodb container for meshcentral
|
||||
mongodb-dev:
|
||||
container_name: trmm-mongodb-dev
|
||||
image: mongo:4.4
|
||||
restart: always
|
||||
environment:
|
||||
@@ -92,6 +97,7 @@ services:
|
||||
|
||||
# postgres database for api service
|
||||
postgres-dev:
|
||||
container_name: trmm-postgres-dev
|
||||
image: postgres:13-alpine
|
||||
restart: always
|
||||
environment:
|
||||
@@ -107,6 +113,7 @@ services:
|
||||
|
||||
# redis container for celery tasks
|
||||
redis-dev:
|
||||
container_name: trmm-redis-dev
|
||||
restart: always
|
||||
image: redis:6.0-alpine
|
||||
networks:
|
||||
@@ -115,6 +122,7 @@ services:
|
||||
- tactical-redis
|
||||
|
||||
init-dev:
|
||||
container_name: trmm-init-dev
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
@@ -143,6 +151,7 @@ services:
|
||||
|
||||
# container for celery worker service
|
||||
celery-dev:
|
||||
container_name: trmm-celery-dev
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
@@ -160,6 +169,7 @@ services:
|
||||
|
||||
# container for celery beat service
|
||||
celerybeat-dev:
|
||||
container_name: trmm-celerybeat-dev
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
@@ -175,8 +185,29 @@ services:
|
||||
- postgres-dev
|
||||
- redis-dev
|
||||
|
||||
nginx-dev:
|
||||
# container for websockets communication
|
||||
websockets-dev:
|
||||
container_name: trmm-websockets-dev
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["tactical-websockets-dev"]
|
||||
restart: always
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-websockets
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
depends_on:
|
||||
- postgres-dev
|
||||
- redis-dev
|
||||
|
||||
# container for tactical reverse proxy
|
||||
nginx-dev:
|
||||
container_name: trmm-nginx-dev
|
||||
image: ${IMAGE_REPO}tactical-nginx:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
|
||||
@@ -136,10 +136,11 @@ if [ "$1" = 'tactical-init-dev' ]; then
|
||||
webenv="$(cat << EOF
|
||||
PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}"
|
||||
DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}"
|
||||
APP_URL = https://${APP_HOST}
|
||||
APP_URL = "https://${APP_HOST}"
|
||||
DOCKER_BUILD = 1
|
||||
EOF
|
||||
)"
|
||||
echo "${webenv}" | tee ${WORKSPACE_DIR}/web/.env > /dev/null
|
||||
echo "${webenv}" | tee "${WORKSPACE_DIR}"/web/.env > /dev/null
|
||||
|
||||
# chown everything to tactical user
|
||||
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
|
||||
@@ -150,9 +151,6 @@ EOF
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-api' ]; then
|
||||
cp "${WORKSPACE_DIR}"/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
|
||||
chmod +x /usr/local/bin/goversioninfo
|
||||
|
||||
check_tactical_ready
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py runserver 0.0.0.0:"${API_PORT}"
|
||||
fi
|
||||
@@ -167,3 +165,8 @@ if [ "$1" = 'tactical-celerybeat-dev' ]; then
|
||||
test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
|
||||
"${VIRTUAL_ENV}"/bin/celery -A tacticalrmm beat -l debug
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-websockets-dev' ]; then
|
||||
check_tactical_ready
|
||||
"${VIRTUAL_ENV}"/bin/daphne tacticalrmm.asgi:application --port 8383 -b 0.0.0.0
|
||||
fi
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
|
||||
asyncio-nats-client
|
||||
celery
|
||||
channels
|
||||
Django
|
||||
django-cors-headers
|
||||
django-rest-knox
|
||||
@@ -30,3 +31,5 @@ mkdocs-material
|
||||
pymdown-extensions
|
||||
Pygments
|
||||
mypy
|
||||
pysnooper
|
||||
isort
|
||||
|
||||
40
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
40
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a bug report
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Server Info (please complete the following information):**
|
||||
- OS: [e.g. Ubuntu 20.04, Debian 10]
|
||||
- Browser: [e.g. chrome, safari]
|
||||
- RMM Version (as shown in top left of web UI):
|
||||
|
||||
**Installation Method:**
|
||||
- [ ] Standard
|
||||
- [ ] Docker
|
||||
|
||||
**Agent Info (please complete the following information):**
|
||||
- Agent version (as shown in the 'Summary' tab of the agent from web UI):
|
||||
- Agent OS: [e.g. Win 10 v2004, Server 2012 R2]
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
Steps to reproduce the behavior:
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Screenshots**
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered**
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
||||
2
.github/workflows/deploy-docs.yml
vendored
2
.github/workflows/deploy-docs.yml
vendored
@@ -2,7 +2,7 @@ name: Deploy Docs
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- master
|
||||
|
||||
defaults:
|
||||
run:
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
|
||||
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
|
||||
|
||||
# [LIVE DEMO](https://rmm.xlawgaming.com/)
|
||||
# [LIVE DEMO](https://rmm.tacticalrmm.io/)
|
||||
Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
|
||||
|
||||
*Tactical RMM is currently in alpha and subject to breaking changes. Use in production at your own risk.*
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import Agent, Note, RecoveryAction
|
||||
from .models import Agent, AgentCustomField, Note, RecoveryAction
|
||||
|
||||
admin.site.register(Agent)
|
||||
admin.site.register(RecoveryAction)
|
||||
admin.site.register(Note)
|
||||
admin.site.register(AgentCustomField)
|
||||
|
||||
24
api/tacticalrmm/agents/migrations/0032_agentcustomfield.py
Normal file
24
api/tacticalrmm/agents/migrations/0032_agentcustomfield.py
Normal file
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-17 14:45
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0014_customfield'),
|
||||
('agents', '0031_agent_alert_template'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='AgentCustomField',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('value', models.TextField(blank=True, null=True)),
|
||||
('agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='custom_fields', to='agents.agent')),
|
||||
('field', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='agent_fields', to='core.customfield')),
|
||||
],
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,19 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 02:51
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0032_agentcustomfield'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='agentcustomfield',
|
||||
name='multiple_value',
|
||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True, null=True), blank=True, default=list, null=True, size=None),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 03:01
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0033_agentcustomfield_multiple_value'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='agentcustomfield',
|
||||
name='checkbox_value',
|
||||
field=models.BooleanField(blank=True, default=False),
|
||||
),
|
||||
]
|
||||
23
api/tacticalrmm/agents/migrations/0035_auto_20210329_1709.py
Normal file
23
api/tacticalrmm/agents/migrations/0035_auto_20210329_1709.py
Normal file
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 17:09
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0034_agentcustomfield_checkbox_value'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name='agentcustomfield',
|
||||
old_name='checkbox_value',
|
||||
new_name='bool_value',
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name='agentcustomfield',
|
||||
old_name='value',
|
||||
new_name='string_value',
|
||||
),
|
||||
]
|
||||
@@ -4,7 +4,7 @@ import re
|
||||
import time
|
||||
from collections import Counter
|
||||
from distutils.version import LooseVersion
|
||||
from typing import Any, Union
|
||||
from typing import Any
|
||||
|
||||
import msgpack
|
||||
import validators
|
||||
@@ -13,12 +13,12 @@ from Crypto.Hash import SHA3_384
|
||||
from Crypto.Random import get_random_bytes
|
||||
from Crypto.Util.Padding import pad
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from nats.aio.client import Client as NATS
|
||||
from nats.aio.errors import ErrTimeout
|
||||
from packaging import version as pyver
|
||||
|
||||
from core.models import TZ_CHOICES, CoreSettings
|
||||
from logs.models import BaseAuditModel
|
||||
@@ -110,14 +110,6 @@ class Agent(BaseAuditModel):
|
||||
def client(self):
|
||||
return self.site.client
|
||||
|
||||
@property
|
||||
def has_nats(self):
|
||||
return pyver.parse(self.version) >= pyver.parse("1.1.0")
|
||||
|
||||
@property
|
||||
def has_gotasks(self):
|
||||
return pyver.parse(self.version) >= pyver.parse("1.1.1")
|
||||
|
||||
@property
|
||||
def timezone(self):
|
||||
# return the default timezone unless the timezone is explicity set per agent
|
||||
@@ -304,10 +296,13 @@ class Agent(BaseAuditModel):
|
||||
from scripts.models import Script
|
||||
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
|
||||
parsed_args = script.parse_script_args(self, script.shell, args)
|
||||
|
||||
data = {
|
||||
"func": "runscriptfull" if full else "runscript",
|
||||
"timeout": timeout,
|
||||
"script_args": args,
|
||||
"script_args": parsed_args,
|
||||
"payload": {
|
||||
"code": script.code,
|
||||
"shell": script.shell,
|
||||
@@ -656,7 +651,11 @@ class Agent(BaseAuditModel):
|
||||
except ErrTimeout:
|
||||
ret = "timeout"
|
||||
else:
|
||||
ret = msgpack.loads(msg.data) # type: ignore
|
||||
try:
|
||||
ret = msgpack.loads(msg.data) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
ret = str(e)
|
||||
|
||||
await nc.close()
|
||||
return ret
|
||||
@@ -845,3 +844,38 @@ class Note(models.Model):
|
||||
|
||||
def __str__(self):
|
||||
return self.agent.hostname
|
||||
|
||||
|
||||
class AgentCustomField(models.Model):
|
||||
agent = models.ForeignKey(
|
||||
Agent,
|
||||
related_name="custom_fields",
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
field = models.ForeignKey(
|
||||
"core.CustomField",
|
||||
related_name="agent_fields",
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
string_value = models.TextField(null=True, blank=True)
|
||||
bool_value = models.BooleanField(blank=True, default=False)
|
||||
multiple_value = ArrayField(
|
||||
models.TextField(null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.field
|
||||
|
||||
@property
|
||||
def value(self):
|
||||
if self.field.type == "multiple":
|
||||
return self.multiple_value
|
||||
elif self.field.type == "checkbox":
|
||||
return self.bool_value
|
||||
else:
|
||||
return self.string_value
|
||||
|
||||
@@ -4,7 +4,7 @@ from rest_framework import serializers
|
||||
from clients.serializers import ClientSerializer
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .models import Agent, Note
|
||||
from .models import Agent, AgentCustomField, Note
|
||||
|
||||
|
||||
class AgentSerializer(serializers.ModelSerializer):
|
||||
@@ -119,10 +119,30 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
||||
depth = 2
|
||||
|
||||
|
||||
class AgentCustomFieldSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = AgentCustomField
|
||||
fields = (
|
||||
"id",
|
||||
"field",
|
||||
"agent",
|
||||
"value",
|
||||
"string_value",
|
||||
"bool_value",
|
||||
"multiple_value",
|
||||
)
|
||||
extra_kwargs = {
|
||||
"string_value": {"write_only": True},
|
||||
"bool_value": {"write_only": True},
|
||||
"multiple_value": {"write_only": True},
|
||||
}
|
||||
|
||||
|
||||
class AgentEditSerializer(serializers.ModelSerializer):
|
||||
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
|
||||
all_timezones = serializers.SerializerMethodField()
|
||||
client = ClientSerializer(read_only=True)
|
||||
custom_fields = AgentCustomFieldSerializer(many=True, read_only=True)
|
||||
|
||||
def get_all_timezones(self, obj):
|
||||
return pytz.all_timezones
|
||||
@@ -146,6 +166,7 @@ class AgentEditSerializer(serializers.ModelSerializer):
|
||||
"all_timezones",
|
||||
"winupdatepolicy",
|
||||
"policy",
|
||||
"custom_fields",
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
import json
|
||||
import random
|
||||
import subprocess
|
||||
import tempfile
|
||||
from time import sleep
|
||||
from typing import Union
|
||||
|
||||
@@ -252,3 +255,48 @@ def run_script_email_results_task(
|
||||
server.quit()
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
|
||||
def _get_nats_config() -> dict:
|
||||
return {
|
||||
"key": settings.SECRET_KEY,
|
||||
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||
}
|
||||
|
||||
|
||||
@app.task
|
||||
def monitor_agents_task() -> None:
|
||||
agents = Agent.objects.only(
|
||||
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
|
||||
)
|
||||
ret = [i.agent_id for i in agents if i.status != "online"]
|
||||
config = _get_nats_config()
|
||||
config["agents"] = ret
|
||||
with tempfile.NamedTemporaryFile() as fp:
|
||||
with open(fp.name, "w") as f:
|
||||
json.dump(config, f)
|
||||
|
||||
cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", "monitor"]
|
||||
try:
|
||||
subprocess.run(cmd, capture_output=True, timeout=30)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
|
||||
@app.task
|
||||
def get_wmi_task() -> None:
|
||||
agents = Agent.objects.only(
|
||||
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
|
||||
)
|
||||
ret = [i.agent_id for i in agents if i.status == "online"]
|
||||
config = _get_nats_config()
|
||||
config["agents"] = ret
|
||||
with tempfile.NamedTemporaryFile() as fp:
|
||||
with open(fp.name, "w") as f:
|
||||
json.dump(config, f)
|
||||
|
||||
cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", "wmi"]
|
||||
try:
|
||||
subprocess.run(cmd, capture_output=True, timeout=30)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
@@ -12,7 +12,7 @@ from tacticalrmm.test import TacticalTestCase
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .models import Agent
|
||||
from .models import Agent, AgentCustomField
|
||||
from .serializers import AgentSerializer
|
||||
from .tasks import auto_self_agent_update_task
|
||||
|
||||
@@ -198,11 +198,6 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_get_processes(self, mock_ret):
|
||||
agent_old = baker.make_recipe("agents.online_agent", version="1.1.12")
|
||||
url_old = f"/agents/{agent_old.pk}/getprocs/"
|
||||
r = self.client.get(url_old)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
agent = baker.make_recipe("agents.online_agent", version="1.2.0")
|
||||
url = f"/agents/{agent.pk}/getprocs/"
|
||||
|
||||
@@ -340,6 +335,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "schedreboot",
|
||||
"deleteafter": True,
|
||||
"trigger": "once",
|
||||
"name": r.data["task_name"], # type: ignore
|
||||
"year": 2025,
|
||||
@@ -367,9 +363,8 @@ class TestAgentViews(TacticalTestCase):
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@patch("os.path.exists")
|
||||
@patch("subprocess.run")
|
||||
def test_install_agent(self, mock_subprocess, mock_file_exists):
|
||||
url = f"/agents/installagent/"
|
||||
def test_install_agent(self, mock_file_exists):
|
||||
url = "/agents/installagent/"
|
||||
|
||||
site = baker.make("clients.Site")
|
||||
data = {
|
||||
@@ -377,38 +372,29 @@ class TestAgentViews(TacticalTestCase):
|
||||
"site": site.id, # type: ignore
|
||||
"arch": "64",
|
||||
"expires": 23,
|
||||
"installMethod": "exe",
|
||||
"installMethod": "manual",
|
||||
"api": "https://api.example.com",
|
||||
"agenttype": "server",
|
||||
"rdp": 1,
|
||||
"ping": 0,
|
||||
"power": 0,
|
||||
"fileName": "rmm-client-site-server.exe",
|
||||
}
|
||||
|
||||
mock_file_exists.return_value = False
|
||||
mock_subprocess.return_value.returncode = 0
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 406)
|
||||
|
||||
mock_file_exists.return_value = True
|
||||
mock_subprocess.return_value.returncode = 1
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 413)
|
||||
|
||||
mock_file_exists.return_value = True
|
||||
mock_subprocess.return_value.returncode = 0
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
data["arch"] = "32"
|
||||
mock_subprocess.return_value.returncode = 0
|
||||
mock_file_exists.return_value = False
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 415)
|
||||
|
||||
data["installMethod"] = "manual"
|
||||
data["arch"] = "64"
|
||||
mock_subprocess.return_value.returncode = 0
|
||||
mock_file_exists.return_value = True
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertIn("rdp", r.json()["cmd"])
|
||||
@@ -419,6 +405,9 @@ class TestAgentViews(TacticalTestCase):
|
||||
self.assertIn("power", r.json()["cmd"])
|
||||
self.assertIn("ping", r.json()["cmd"])
|
||||
|
||||
data["installMethod"] = "powershell"
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
@@ -538,6 +527,35 @@ class TestAgentViews(TacticalTestCase):
|
||||
data = WinUpdatePolicySerializer(policy).data
|
||||
self.assertEqual(data["run_time_days"], [2, 3, 6])
|
||||
|
||||
# test adding custom fields
|
||||
field = baker.make("core.CustomField", model="agent", type="number")
|
||||
edit = {
|
||||
"id": self.agent.pk,
|
||||
"site": site.id, # type: ignore
|
||||
"description": "asjdk234andasd",
|
||||
"custom_fields": [{"field": field.id, "string_value": "123"}], # type: ignore
|
||||
}
|
||||
|
||||
r = self.client.patch(url, edit, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(
|
||||
AgentCustomField.objects.filter(agent=self.agent, field=field).exists()
|
||||
)
|
||||
|
||||
# test edit custom field
|
||||
edit = {
|
||||
"id": self.agent.pk,
|
||||
"site": site.id, # type: ignore
|
||||
"description": "asjdk234andasd",
|
||||
"custom_fields": [{"field": field.id, "string_value": "456"}], # type: ignore
|
||||
}
|
||||
|
||||
r = self.client.patch(url, edit, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(
|
||||
AgentCustomField.objects.get(agent=agent, field=field).value,
|
||||
"456",
|
||||
)
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@patch("agents.models.Agent.get_login_token")
|
||||
@@ -825,7 +843,7 @@ class TestAgentViewsNew(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_agent_counts(self):
|
||||
""" def test_agent_counts(self):
|
||||
url = "/agents/agent_counts/"
|
||||
|
||||
# create some data
|
||||
@@ -852,7 +870,7 @@ class TestAgentViewsNew(TacticalTestCase):
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
self.check_not_authenticated("post", url) """
|
||||
|
||||
def test_agent_maintenance_mode(self):
|
||||
url = "/agents/maintenance/"
|
||||
|
||||
@@ -27,7 +27,6 @@ urlpatterns = [
|
||||
path("<int:pk>/notes/", views.GetAddNotes.as_view()),
|
||||
path("<int:pk>/note/", views.GetEditDeleteNote.as_view()),
|
||||
path("bulk/", views.bulk),
|
||||
path("agent_counts/", views.agent_counts),
|
||||
path("maintenance/", views.agent_maintenance),
|
||||
path("<int:pk>/wmi/", views.WMI.as_view()),
|
||||
]
|
||||
|
||||
@@ -18,17 +18,13 @@ from core.models import CoreSettings
|
||||
from logs.models import AuditLog, PendingAction
|
||||
from scripts.models import Script
|
||||
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
|
||||
from tacticalrmm.utils import (
|
||||
generate_installer_exe,
|
||||
get_default_timezone,
|
||||
notify_error,
|
||||
reload_nats,
|
||||
)
|
||||
from tacticalrmm.utils import get_default_timezone, notify_error, reload_nats
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
|
||||
|
||||
from .models import Agent, Note, RecoveryAction
|
||||
from .models import Agent, AgentCustomField, Note, RecoveryAction
|
||||
from .serializers import (
|
||||
AgentCustomFieldSerializer,
|
||||
AgentEditSerializer,
|
||||
AgentHostnameSerializer,
|
||||
AgentOverdueActionSerializer,
|
||||
@@ -69,10 +65,9 @@ def update_agents(request):
|
||||
def ping(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
status = "offline"
|
||||
if agent.has_nats:
|
||||
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
|
||||
if r == "pong":
|
||||
status = "online"
|
||||
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
|
||||
if r == "pong":
|
||||
status = "online"
|
||||
|
||||
return Response({"name": agent.hostname, "status": status})
|
||||
|
||||
@@ -80,8 +75,7 @@ def ping(request, pk):
|
||||
@api_view(["DELETE"])
|
||||
def uninstall(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if agent.has_nats:
|
||||
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
|
||||
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
|
||||
|
||||
name = agent.hostname
|
||||
agent.delete()
|
||||
@@ -89,7 +83,7 @@ def uninstall(request):
|
||||
return Response(f"{name} will now be uninstalled.")
|
||||
|
||||
|
||||
@api_view(["PATCH"])
|
||||
@api_view(["PATCH", "PUT"])
|
||||
def edit_agent(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["id"])
|
||||
|
||||
@@ -105,6 +99,29 @@ def edit_agent(request):
|
||||
p_serializer.is_valid(raise_exception=True)
|
||||
p_serializer.save()
|
||||
|
||||
if "custom_fields" in request.data.keys():
|
||||
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
custom_field = field
|
||||
custom_field["agent"] = agent.id # type: ignore
|
||||
|
||||
if AgentCustomField.objects.filter(
|
||||
field=field["field"], agent=agent.id # type: ignore
|
||||
):
|
||||
value = AgentCustomField.objects.get(
|
||||
field=field["field"], agent=agent.id # type: ignore
|
||||
)
|
||||
serializer = AgentCustomFieldSerializer(
|
||||
instance=value, data=custom_field
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
else:
|
||||
serializer = AgentCustomFieldSerializer(data=custom_field)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -147,9 +164,6 @@ def agent_detail(request, pk):
|
||||
@api_view()
|
||||
def get_processes(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if pyver.parse(agent.version) < pyver.parse("1.2.0"):
|
||||
return notify_error("Requires agent version 1.2.0 or greater")
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
@@ -159,9 +173,6 @@ def get_processes(request, pk):
|
||||
@api_view()
|
||||
def kill_proc(request, pk, pid):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
r = asyncio.run(
|
||||
agent.nats_cmd({"func": "killproc", "procpid": int(pid)}, timeout=15)
|
||||
)
|
||||
@@ -177,8 +188,6 @@ def kill_proc(request, pk, pid):
|
||||
@api_view()
|
||||
def get_event_log(request, pk, logtype, days):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
timeout = 180 if logtype == "Security" else 30
|
||||
data = {
|
||||
"func": "eventlog",
|
||||
@@ -198,8 +207,6 @@ def get_event_log(request, pk, logtype, days):
|
||||
@api_view(["POST"])
|
||||
def send_raw_cmd(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
timeout = int(request.data["timeout"])
|
||||
data = {
|
||||
"func": "rawcmd",
|
||||
@@ -296,9 +303,6 @@ class Reboot(APIView):
|
||||
# reboot now
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
r = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
@@ -308,8 +312,6 @@ class Reboot(APIView):
|
||||
# reboot later
|
||||
def patch(self, request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_gotasks:
|
||||
return notify_error("Requires agent version 1.1.1 or greater")
|
||||
|
||||
try:
|
||||
obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%d %H:%M")
|
||||
@@ -324,6 +326,7 @@ class Reboot(APIView):
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "schedreboot",
|
||||
"deleteafter": True,
|
||||
"trigger": "once",
|
||||
"name": task_name,
|
||||
"year": int(dt.datetime.strftime(obj, "%Y")),
|
||||
@@ -334,9 +337,6 @@ class Reboot(APIView):
|
||||
},
|
||||
}
|
||||
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.1.2"):
|
||||
nats_data["schedtaskpayload"]["deleteafter"] = True
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
if r != "ok":
|
||||
return notify_error(r)
|
||||
@@ -382,19 +382,19 @@ def install_agent(request):
|
||||
)
|
||||
|
||||
if request.data["installMethod"] == "exe":
|
||||
return generate_installer_exe(
|
||||
file_name="rmm-installer.exe",
|
||||
goarch="amd64" if arch == "64" else "386",
|
||||
inno=inno,
|
||||
api=request.data["api"],
|
||||
client_id=client_id,
|
||||
site_id=site_id,
|
||||
atype=request.data["agenttype"],
|
||||
from tacticalrmm.utils import generate_winagent_exe
|
||||
|
||||
return generate_winagent_exe(
|
||||
client=client_id,
|
||||
site=site_id,
|
||||
agent_type=request.data["agenttype"],
|
||||
rdp=request.data["rdp"],
|
||||
ping=request.data["ping"],
|
||||
power=request.data["power"],
|
||||
download_url=download_url,
|
||||
arch=arch,
|
||||
token=token,
|
||||
api=request.data["api"],
|
||||
file_name=request.data["fileName"],
|
||||
)
|
||||
|
||||
elif request.data["installMethod"] == "manual":
|
||||
@@ -561,9 +561,6 @@ def run_script(request):
|
||||
@api_view()
|
||||
def recover_mesh(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
data = {"func": "recover", "payload": {"mode": "mesh"}}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=45))
|
||||
if r != "ok":
|
||||
@@ -674,49 +671,6 @@ def bulk(request):
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def agent_counts(request):
|
||||
|
||||
server_offline_count = len(
|
||||
[
|
||||
agent
|
||||
for agent in Agent.objects.filter(monitoring_type="server").only(
|
||||
"pk",
|
||||
"last_seen",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
)
|
||||
if not agent.status == "online"
|
||||
]
|
||||
)
|
||||
|
||||
workstation_offline_count = len(
|
||||
[
|
||||
agent
|
||||
for agent in Agent.objects.filter(monitoring_type="workstation").only(
|
||||
"pk",
|
||||
"last_seen",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
)
|
||||
if not agent.status == "online"
|
||||
]
|
||||
)
|
||||
|
||||
return Response(
|
||||
{
|
||||
"total_server_count": Agent.objects.filter(
|
||||
monitoring_type="server"
|
||||
).count(),
|
||||
"total_server_offline_count": server_offline_count,
|
||||
"total_workstation_count": Agent.objects.filter(
|
||||
monitoring_type="workstation"
|
||||
).count(),
|
||||
"total_workstation_offline_count": workstation_offline_count,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def agent_maintenance(request):
|
||||
if request.data["type"] == "Client":
|
||||
@@ -743,9 +697,6 @@ def agent_maintenance(request):
|
||||
class WMI(APIView):
|
||||
def get(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if pyver.parse(agent.version) < pyver.parse("1.1.2"):
|
||||
return notify_error("Requires agent version 1.1.2 or greater")
|
||||
|
||||
r = asyncio.run(agent.nats_cmd({"func": "sysinfo"}, timeout=20))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
@@ -112,6 +112,23 @@ class TestAPIv3(TacticalTestCase):
|
||||
{"agent": self.agent.pk, "check_interval": 15},
|
||||
)
|
||||
|
||||
def test_run_checks(self):
|
||||
# force run all checks regardless of interval
|
||||
agent = baker.make_recipe("agents.online_agent")
|
||||
baker.make_recipe("checks.ping_check", agent=agent)
|
||||
baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
baker.make_recipe("checks.cpuload_check", agent=agent)
|
||||
baker.make_recipe("checks.memory_check", agent=agent)
|
||||
baker.make_recipe("checks.eventlog_check", agent=agent)
|
||||
for _ in range(10):
|
||||
baker.make_recipe("checks.script_check", agent=agent)
|
||||
|
||||
url = f"/api/v3/{agent.agent_id}/runchecks/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.json()["agent"], agent.pk)
|
||||
self.assertIsInstance(r.json()["check_interval"], int)
|
||||
self.assertEqual(len(r.json()["checks"]), 15)
|
||||
|
||||
def test_checkin_patch(self):
|
||||
from logs.models import PendingAction
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ from . import views
|
||||
urlpatterns = [
|
||||
path("checkrunner/", views.CheckRunner.as_view()),
|
||||
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
|
||||
path("<str:agentid>/runchecks/", views.RunChecks.as_view()),
|
||||
path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()),
|
||||
path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
|
||||
path("meshexe/", views.MeshExe.as_view()),
|
||||
|
||||
@@ -260,6 +260,21 @@ class SupersededWinUpdate(APIView):
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class RunChecks(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
|
||||
ret = {
|
||||
"agent": agent.pk,
|
||||
"check_interval": agent.check_interval,
|
||||
"checks": CheckRunnerGetSerializer(checks, many=True).data,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
|
||||
class CheckRunner(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
@@ -29,7 +29,6 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
policy = baker.make("automation.Policy")
|
||||
check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
old_agent = baker.make_recipe("agents.agent", version="1.1.0")
|
||||
|
||||
# test script set to invalid pk
|
||||
data = {"autotask": {"script": 500}}
|
||||
@@ -52,15 +51,6 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test old agent version
|
||||
data = {
|
||||
"autotask": {"script": script.id},
|
||||
"agent": old_agent.id,
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
# test add task to agent
|
||||
data = {
|
||||
"autotask": {
|
||||
@@ -203,13 +193,6 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
nats_cmd.assert_called_with({"func": "runtask", "taskpk": task.id}, wait=False)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
old_agent = baker.make_recipe("agents.agent", version="1.0.2")
|
||||
task2 = baker.make("autotasks.AutomatedTask", agent=old_agent)
|
||||
url = f"/tasks/runwintask/{task2.id}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
nats_cmd.assert_not_called()
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
|
||||
|
||||
@@ -34,9 +34,6 @@ class AddAutoTask(APIView):
|
||||
parent = {"policy": policy}
|
||||
else:
|
||||
agent = get_object_or_404(Agent, pk=data["agent"])
|
||||
if not agent.has_gotasks:
|
||||
return notify_error("Requires agent version 1.1.1 or greater")
|
||||
|
||||
parent = {"agent": agent}
|
||||
|
||||
check = None
|
||||
@@ -128,8 +125,5 @@ class AutoTask(APIView):
|
||||
@api_view()
|
||||
def run_task(request, pk):
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
if not task.agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
|
||||
return Response(f"{task.name} will now be run on {task.agent.hostname}")
|
||||
|
||||
@@ -667,16 +667,7 @@ class Check(BaseAuditModel):
|
||||
body = subject + f" - Average memory usage: {avg}%, {text}"
|
||||
|
||||
elif self.check_type == "winsvc":
|
||||
|
||||
try:
|
||||
status = list(
|
||||
filter(lambda x: x["name"] == self.svc_name, self.agent.services)
|
||||
)[0]["status"]
|
||||
# catch services that don't exist if policy check
|
||||
except:
|
||||
status = "Unknown"
|
||||
|
||||
body = subject + f" - Status: {status.upper()}"
|
||||
body = subject + f" - Status: {self.more_info}"
|
||||
|
||||
elif self.check_type == "eventlog":
|
||||
|
||||
@@ -741,10 +732,7 @@ class Check(BaseAuditModel):
|
||||
elif self.check_type == "memory":
|
||||
body = subject + f" - Average memory usage: {avg}%, {text}"
|
||||
elif self.check_type == "winsvc":
|
||||
status = list(
|
||||
filter(lambda x: x["name"] == self.svc_name, self.agent.services)
|
||||
)[0]["status"]
|
||||
body = subject + f" - Status: {status.upper()}"
|
||||
body = subject + f" - Status: {self.more_info}"
|
||||
elif self.check_type == "eventlog":
|
||||
body = subject
|
||||
|
||||
|
||||
@@ -310,14 +310,8 @@ class TestCheckViews(TacticalTestCase):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_run_checks(self, nats_cmd):
|
||||
agent = baker.make_recipe("agents.agent", version="1.4.1")
|
||||
agent_old = baker.make_recipe("agents.agent", version="1.0.2")
|
||||
agent_b4_141 = baker.make_recipe("agents.agent", version="1.4.0")
|
||||
|
||||
url = f"/checks/runchecks/{agent_old.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.json(), "Requires agent version 1.1.0 or greater")
|
||||
|
||||
url = f"/checks/runchecks/{agent_b4_141.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@@ -161,8 +161,6 @@ class CheckHistory(APIView):
|
||||
@api_view()
|
||||
def run_checks(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.4.1"):
|
||||
r = asyncio.run(agent.nats_cmd({"func": "runchecks"}, timeout=15))
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import Client, Deployment, Site
|
||||
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
|
||||
|
||||
admin.site.register(Client)
|
||||
admin.site.register(Site)
|
||||
admin.site.register(Deployment)
|
||||
admin.site.register(ClientCustomField)
|
||||
admin.site.register(SiteCustomField)
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-17 14:45
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0014_customfield'),
|
||||
('clients', '0009_auto_20210212_1408'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='SiteCustomField',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('value', models.TextField(blank=True, null=True)),
|
||||
('field', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='site_fields', to='core.customfield')),
|
||||
('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='custom_fields', to='clients.site')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ClientCustomField',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('value', models.TextField(blank=True, null=True)),
|
||||
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='custom_fields', to='clients.client')),
|
||||
('field', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='client_fields', to='core.customfield')),
|
||||
],
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,17 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-21 15:11
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0010_clientcustomfield_sitecustomfield'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name='site',
|
||||
unique_together={('client', 'name')},
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-26 06:52
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0011_auto_20210321_1511'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='deployment',
|
||||
name='created',
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 02:51
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0012_deployment_created'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='clientcustomfield',
|
||||
name='multiple_value',
|
||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True, null=True), blank=True, default=list, null=True, size=None),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='sitecustomfield',
|
||||
name='multiple_value',
|
||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True, null=True), blank=True, default=list, null=True, size=None),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 03:01
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0013_auto_20210329_0251'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='clientcustomfield',
|
||||
name='checkbox_value',
|
||||
field=models.BooleanField(blank=True, default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='sitecustomfield',
|
||||
name='checkbox_value',
|
||||
field=models.BooleanField(blank=True, default=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 17:09
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0014_auto_20210329_0301'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name='clientcustomfield',
|
||||
old_name='checkbox_value',
|
||||
new_name='bool_value',
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name='clientcustomfield',
|
||||
old_name='value',
|
||||
new_name='string_value',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='sitecustomfield',
|
||||
name='value',
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 18:27
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0015_auto_20210329_1709'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name='sitecustomfield',
|
||||
old_name='checkbox_value',
|
||||
new_name='bool_value',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='sitecustomfield',
|
||||
name='string_value',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -1,5 +1,6 @@
|
||||
import uuid
|
||||
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
|
||||
from agents.models import Agent
|
||||
@@ -159,6 +160,7 @@ class Site(BaseAuditModel):
|
||||
|
||||
class Meta:
|
||||
ordering = ("name",)
|
||||
unique_together = (("client", "name"),)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
@@ -225,6 +227,7 @@ class Deployment(models.Model):
|
||||
)
|
||||
arch = models.CharField(max_length=255, choices=ARCH_CHOICES, default="64")
|
||||
expiry = models.DateTimeField(null=True, blank=True)
|
||||
created = models.DateTimeField(auto_now_add=True, null=True, blank=True)
|
||||
auth_token = models.ForeignKey(
|
||||
"knox.AuthToken", related_name="deploytokens", on_delete=models.CASCADE
|
||||
)
|
||||
@@ -233,3 +236,73 @@ class Deployment(models.Model):
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.client} - {self.site} - {self.mon_type}"
|
||||
|
||||
|
||||
class ClientCustomField(models.Model):
|
||||
client = models.ForeignKey(
|
||||
Client,
|
||||
related_name="custom_fields",
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
field = models.ForeignKey(
|
||||
"core.CustomField",
|
||||
related_name="client_fields",
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
string_value = models.TextField(null=True, blank=True)
|
||||
bool_value = models.BooleanField(blank=True, default=False)
|
||||
multiple_value = ArrayField(
|
||||
models.TextField(null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.field.name
|
||||
|
||||
@property
|
||||
def value(self):
|
||||
if self.field.type == "multiple":
|
||||
return self.multiple_value
|
||||
elif self.field.type == "checkbox":
|
||||
return self.bool_value
|
||||
else:
|
||||
return self.string_value
|
||||
|
||||
|
||||
class SiteCustomField(models.Model):
|
||||
site = models.ForeignKey(
|
||||
Site,
|
||||
related_name="custom_fields",
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
field = models.ForeignKey(
|
||||
"core.CustomField",
|
||||
related_name="site_fields",
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
string_value = models.TextField(null=True, blank=True)
|
||||
bool_value = models.BooleanField(blank=True, default=False)
|
||||
multiple_value = ArrayField(
|
||||
models.TextField(null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.field.name
|
||||
|
||||
@property
|
||||
def value(self):
|
||||
if self.field.type == "multiple":
|
||||
return self.multiple_value
|
||||
elif self.field.type == "checkbox":
|
||||
return self.bool_value
|
||||
else:
|
||||
return self.string_value
|
||||
|
||||
@@ -1,42 +1,87 @@
|
||||
from rest_framework.serializers import ModelSerializer, ReadOnlyField, ValidationError
|
||||
|
||||
from .models import Client, Deployment, Site
|
||||
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
|
||||
|
||||
|
||||
class SiteCustomFieldSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = SiteCustomField
|
||||
fields = (
|
||||
"id",
|
||||
"field",
|
||||
"site",
|
||||
"value",
|
||||
"string_value",
|
||||
"bool_value",
|
||||
"multiple_value",
|
||||
)
|
||||
extra_kwargs = {
|
||||
"string_value": {"write_only": True},
|
||||
"bool_value": {"write_only": True},
|
||||
"multiple_value": {"write_only": True},
|
||||
}
|
||||
|
||||
|
||||
class SiteSerializer(ModelSerializer):
|
||||
client_name = ReadOnlyField(source="client.name")
|
||||
custom_fields = SiteCustomFieldSerializer(many=True, read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Site
|
||||
fields = "__all__"
|
||||
fields = (
|
||||
"id",
|
||||
"name",
|
||||
"server_policy",
|
||||
"workstation_policy",
|
||||
"alert_template",
|
||||
"client_name",
|
||||
"client",
|
||||
"custom_fields",
|
||||
)
|
||||
|
||||
def validate(self, val):
|
||||
if "name" in val.keys() and "|" in val["name"]:
|
||||
raise ValidationError("Site name cannot contain the | character")
|
||||
|
||||
if self.context:
|
||||
client = Client.objects.get(pk=self.context["clientpk"])
|
||||
if Site.objects.filter(client=client, name=val["name"]).exists():
|
||||
raise ValidationError(f"Site {val['name']} already exists")
|
||||
|
||||
return val
|
||||
|
||||
|
||||
class ClientCustomFieldSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = ClientCustomField
|
||||
fields = (
|
||||
"id",
|
||||
"field",
|
||||
"client",
|
||||
"value",
|
||||
"string_value",
|
||||
"bool_value",
|
||||
"multiple_value",
|
||||
)
|
||||
extra_kwargs = {
|
||||
"string_value": {"write_only": True},
|
||||
"bool_value": {"write_only": True},
|
||||
"multiple_value": {"write_only": True},
|
||||
}
|
||||
|
||||
|
||||
class ClientSerializer(ModelSerializer):
|
||||
sites = SiteSerializer(many=True, read_only=True)
|
||||
custom_fields = ClientCustomFieldSerializer(many=True, read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Client
|
||||
fields = "__all__"
|
||||
fields = (
|
||||
"id",
|
||||
"name",
|
||||
"server_policy",
|
||||
"workstation_policy",
|
||||
"alert_template",
|
||||
"sites",
|
||||
"custom_fields",
|
||||
)
|
||||
|
||||
def validate(self, val):
|
||||
|
||||
if "site" in self.context:
|
||||
if "|" in self.context["site"]:
|
||||
raise ValidationError("Site name cannot contain the | character")
|
||||
if len(self.context["site"]) > 255:
|
||||
raise ValidationError("Site name too long")
|
||||
|
||||
if "name" in val.keys() and "|" in val["name"]:
|
||||
raise ValidationError("Client name cannot contain the | character")
|
||||
|
||||
@@ -83,4 +128,5 @@ class DeploymentSerializer(ModelSerializer):
|
||||
"arch",
|
||||
"expiry",
|
||||
"install_flags",
|
||||
"created",
|
||||
]
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from model_bakery import baker
|
||||
from rest_framework.serializers import ValidationError
|
||||
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import Client, Deployment, Site
|
||||
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
|
||||
from .serializers import (
|
||||
ClientSerializer,
|
||||
ClientTreeSerializer,
|
||||
@@ -28,18 +29,29 @@ class TestClientViews(TacticalTestCase):
|
||||
r = self.client.get(url, format="json")
|
||||
serializer = ClientSerializer(clients, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_client(self):
|
||||
url = "/clients/clients/"
|
||||
payload = {"client": "Company 1", "site": "Site 1"}
|
||||
|
||||
# test successful add client
|
||||
payload = {
|
||||
"client": {"name": "Client1"},
|
||||
"site": {"name": "Site1"},
|
||||
"custom_fields": [],
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
payload["client"] = "Company1|askd"
|
||||
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
|
||||
# test add client with | in name
|
||||
payload = {
|
||||
"client": {"name": "Client2|d"},
|
||||
"site": {"name": "Site1"},
|
||||
"custom_fields": [],
|
||||
}
|
||||
serializer = ClientSerializer(data=payload["client"])
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "Client name cannot contain the | character"
|
||||
):
|
||||
@@ -48,19 +60,22 @@ class TestClientViews(TacticalTestCase):
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
payload = {"client": "Company 156", "site": "Site2|a34"}
|
||||
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "Site name cannot contain the | character"
|
||||
):
|
||||
self.assertFalse(serializer.is_valid(raise_exception=True))
|
||||
|
||||
# test add client with | in Site name
|
||||
payload = {
|
||||
"client": {"name": "Client2"},
|
||||
"site": {"name": "Site1|fds"},
|
||||
"custom_fields": [],
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test unique
|
||||
payload = {"client": "Company 1", "site": "Site 1"}
|
||||
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
|
||||
payload = {
|
||||
"client": {"name": "Client1"},
|
||||
"site": {"name": "Site1"},
|
||||
"custom_fields": [],
|
||||
}
|
||||
serializer = ClientSerializer(data=payload["client"])
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "client with this name already exists."
|
||||
):
|
||||
@@ -69,67 +84,129 @@ class TestClientViews(TacticalTestCase):
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test long site name
|
||||
payload = {"client": "Company 2394", "site": "Site123" * 100}
|
||||
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
|
||||
with self.assertRaisesMessage(ValidationError, "Site name too long"):
|
||||
self.assertFalse(serializer.is_valid(raise_exception=True))
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test initial setup
|
||||
payload = {
|
||||
"client": {"client": "Company 4", "site": "HQ"},
|
||||
"initialsetup": True,
|
||||
"client": {"name": "Setup Client"},
|
||||
"site": {"name": "Setup Site"},
|
||||
"timezone": "America/Los_Angeles",
|
||||
"initialsetup": True,
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
# test add with custom fields
|
||||
field = baker.make("core.CustomField", model="client", type="text")
|
||||
payload = {
|
||||
"client": {"name": "Custom Field Client"},
|
||||
"site": {"name": "Setup Site"},
|
||||
"custom_fields": [{"field": field.id, "string_value": "new Value"}], # type: ignore
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
client = Client.objects.get(name="Custom Field Client")
|
||||
self.assertTrue(
|
||||
ClientCustomField.objects.filter(client=client, field=field).exists()
|
||||
)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_get_client(self):
|
||||
# setup data
|
||||
client = baker.make("clients.Client")
|
||||
|
||||
url = f"/clients/{client.id}/client/" # type: ignore
|
||||
r = self.client.get(url, format="json")
|
||||
serializer = ClientSerializer(client)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_edit_client(self):
|
||||
# setup data
|
||||
client = baker.make("clients.Client")
|
||||
client = baker.make("clients.Client", name="OldClientName")
|
||||
|
||||
# test invalid id
|
||||
r = self.client.put("/clients/500/client/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
data = {"id": client.id, "name": "New Name"}
|
||||
|
||||
url = f"/clients/{client.id}/client/"
|
||||
# test successful edit client
|
||||
data = {"client": {"name": "NewClientName"}, "custom_fields": []}
|
||||
url = f"/clients/{client.id}/client/" # type: ignore
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(Client.objects.filter(name="New Name").exists())
|
||||
self.assertTrue(Client.objects.filter(name="NewClientName").exists())
|
||||
self.assertFalse(Client.objects.filter(name="OldClientName").exists())
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_delete_client(self):
|
||||
# setup data
|
||||
client = baker.make("clients.Client")
|
||||
site = baker.make("clients.Site", client=client)
|
||||
agent = baker.make_recipe("agents.agent", site=site)
|
||||
|
||||
# test invalid id
|
||||
r = self.client.delete("/clients/500/client/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/clients/{client.id}/client/"
|
||||
|
||||
# test deleting with agents under client
|
||||
r = self.client.delete(url, format="json")
|
||||
# test edit client with | in name
|
||||
data = {"client": {"name": "NewClie|ntName"}, "custom_fields": []}
|
||||
url = f"/clients/{client.id}/client/" # type: ignore
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test successful deletion
|
||||
agent.delete()
|
||||
r = self.client.delete(url, format="json")
|
||||
# test add with custom fields new value
|
||||
field = baker.make("core.CustomField", model="client", type="checkbox")
|
||||
payload = {
|
||||
"client": {
|
||||
"id": client.id, # type: ignore
|
||||
"name": "Custom Field Client",
|
||||
},
|
||||
"custom_fields": [{"field": field.id, "bool_value": True}], # type: ignore
|
||||
}
|
||||
r = self.client.put(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertFalse(Client.objects.filter(pk=client.id).exists())
|
||||
self.assertFalse(Site.objects.filter(pk=site.id).exists())
|
||||
|
||||
client = Client.objects.get(name="Custom Field Client")
|
||||
self.assertTrue(
|
||||
ClientCustomField.objects.filter(client=client, field=field).exists()
|
||||
)
|
||||
|
||||
# edit custom field value
|
||||
payload = {
|
||||
"client": {
|
||||
"id": client.id, # type: ignore
|
||||
"name": "Custom Field Client",
|
||||
},
|
||||
"custom_fields": [{"field": field.id, "bool_value": False}], # type: ignore
|
||||
}
|
||||
r = self.client.put(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.assertFalse(
|
||||
ClientCustomField.objects.get(client=client, field=field).value
|
||||
)
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||
def test_delete_client(self, task1, task2):
|
||||
from agents.models import Agent
|
||||
|
||||
task1.return_value = "ok"
|
||||
task2.return_value = "ok"
|
||||
# setup data
|
||||
client_to_delete = baker.make("clients.Client")
|
||||
client_to_move = baker.make("clients.Client")
|
||||
site_to_move = baker.make("clients.Site", client=client_to_move)
|
||||
agent = baker.make_recipe("agents.agent", site=site_to_move)
|
||||
|
||||
# test invalid id
|
||||
r = self.client.delete("/clients/334/953/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/clients/{client_to_delete.id}/{site_to_move.id}/" # type: ignore
|
||||
|
||||
# test successful deletion
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
agent_moved = Agent.objects.get(pk=agent.pk)
|
||||
self.assertEqual(agent_moved.site.id, site_to_move.id) # type: ignore
|
||||
self.assertFalse(Client.objects.filter(pk=client_to_delete.id).exists()) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_get_sites(self):
|
||||
# setup data
|
||||
baker.make("clients.Site", _quantity=5)
|
||||
@@ -139,29 +216,31 @@ class TestClientViews(TacticalTestCase):
|
||||
r = self.client.get(url, format="json")
|
||||
serializer = SiteSerializer(sites, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_site(self):
|
||||
# setup data
|
||||
site = baker.make("clients.Site")
|
||||
client = baker.make("clients.Client")
|
||||
site = baker.make("clients.Site", client=client)
|
||||
|
||||
url = "/clients/sites/"
|
||||
|
||||
# test success add
|
||||
payload = {"client": site.client.id, "name": "LA Office"}
|
||||
payload = {
|
||||
"site": {"client": client.id, "name": "LA Office"}, # type: ignore
|
||||
"custom_fields": [],
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(
|
||||
Site.objects.filter(
|
||||
name="LA Office", client__name=site.client.name
|
||||
).exists()
|
||||
)
|
||||
|
||||
# test with | symbol
|
||||
payload = {"client": site.client.id, "name": "LA Off|ice |*&@#$"}
|
||||
serializer = SiteSerializer(data=payload, context={"clientpk": site.client.id})
|
||||
payload = {
|
||||
"site": {"client": client.id, "name": "LA Office |*&@#$"}, # type: ignore
|
||||
"custom_fields": [],
|
||||
}
|
||||
serializer = SiteSerializer(data=payload["site"])
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "Site name cannot contain the | character"
|
||||
):
|
||||
@@ -171,55 +250,139 @@ class TestClientViews(TacticalTestCase):
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test site already exists
|
||||
payload = {"client": site.client.id, "name": "LA Office"}
|
||||
serializer = SiteSerializer(data=payload, context={"clientpk": site.client.id})
|
||||
with self.assertRaisesMessage(ValidationError, "Site LA Office already exists"):
|
||||
payload = {
|
||||
"site": {"client": site.client.id, "name": "LA Office"}, # type: ignore
|
||||
"custom_fields": [],
|
||||
}
|
||||
serializer = SiteSerializer(data=payload["site"])
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "The fields client, name must make a unique set."
|
||||
):
|
||||
self.assertFalse(serializer.is_valid(raise_exception=True))
|
||||
|
||||
# test add with custom fields
|
||||
field = baker.make(
|
||||
"core.CustomField",
|
||||
model="site",
|
||||
type="single",
|
||||
options=["one", "two", "three"],
|
||||
)
|
||||
payload = {
|
||||
"site": {"client": client.id, "name": "Custom Field Site"}, # type: ignore
|
||||
"custom_fields": [{"field": field.id, "string_value": "one"}], # type: ignore
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
site = Site.objects.get(name="Custom Field Site")
|
||||
self.assertTrue(SiteCustomField.objects.filter(site=site, field=field).exists())
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_edit_site(self):
|
||||
def test_get_site(self):
|
||||
# setup data
|
||||
site = baker.make("clients.Site")
|
||||
|
||||
url = f"/clients/sites/{site.id}/" # type: ignore
|
||||
r = self.client.get(url, format="json")
|
||||
serializer = SiteSerializer(site)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_edit_site(self):
|
||||
# setup data
|
||||
client = baker.make("clients.Client")
|
||||
site = baker.make("clients.Site", client=client)
|
||||
|
||||
# test invalid id
|
||||
r = self.client.put("/clients/500/site/", format="json")
|
||||
r = self.client.put("/clients/sites/688/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
data = {"id": site.id, "name": "New Name", "client": site.client.id}
|
||||
data = {
|
||||
"site": {"client": client.id, "name": "New Site Name"}, # type: ignore
|
||||
"custom_fields": [],
|
||||
}
|
||||
|
||||
url = f"/clients/{site.id}/site/"
|
||||
url = f"/clients/sites/{site.id}/" # type: ignore
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(Site.objects.filter(name="New Name").exists())
|
||||
self.assertTrue(
|
||||
Site.objects.filter(client=client, name="New Site Name").exists()
|
||||
)
|
||||
|
||||
# test add with custom fields new value
|
||||
field = baker.make(
|
||||
"core.CustomField",
|
||||
model="site",
|
||||
type="multiple",
|
||||
options=["one", "two", "three"],
|
||||
)
|
||||
payload = {
|
||||
"site": {
|
||||
"id": site.id, # type: ignore
|
||||
"client": site.client.id, # type: ignore
|
||||
"name": "Custom Field Site",
|
||||
},
|
||||
"custom_fields": [{"field": field.id, "multiple_value": ["two", "three"]}], # type: ignore
|
||||
}
|
||||
r = self.client.put(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
site = Site.objects.get(name="Custom Field Site")
|
||||
self.assertTrue(SiteCustomField.objects.filter(site=site, field=field).exists())
|
||||
|
||||
# edit custom field value
|
||||
payload = {
|
||||
"site": {
|
||||
"id": site.id, # type: ignore
|
||||
"client": client.id, # type: ignore
|
||||
"name": "Custom Field Site",
|
||||
},
|
||||
"custom_fields": [{"field": field.id, "multiple_value": ["one"]}], # type: ignore
|
||||
}
|
||||
r = self.client.put(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.assertTrue(
|
||||
SiteCustomField.objects.get(site=site, field=field).value,
|
||||
["one"],
|
||||
)
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_delete_site(self):
|
||||
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||
def test_delete_site(self, task1, task2):
|
||||
from agents.models import Agent
|
||||
|
||||
task1.return_value = "ok"
|
||||
task2.return_value = "ok"
|
||||
# setup data
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.agent", site=site)
|
||||
client = baker.make("clients.Client")
|
||||
site_to_delete = baker.make("clients.Site", client=client)
|
||||
site_to_move = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.agent", site=site_to_delete)
|
||||
|
||||
# test invalid id
|
||||
r = self.client.delete("/clients/500/site/", format="json")
|
||||
r = self.client.delete("/clients/500/445/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/clients/{site.id}/site/"
|
||||
url = f"/clients/sites/{site_to_delete.id}/{site_to_move.id}/" # type: ignore
|
||||
|
||||
# test deleting with last site under client
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test deletion when agents exist under site
|
||||
baker.make("clients.Site", client=site.client)
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.json(), "A client must have at least 1 site.")
|
||||
|
||||
# test successful deletion
|
||||
agent.delete()
|
||||
site_to_move.client = client # type: ignore
|
||||
site_to_move.save(update_fields=["client"]) # type: ignore
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertFalse(Site.objects.filter(pk=site.id).exists())
|
||||
agent_moved = Agent.objects.get(pk=agent.pk)
|
||||
self.assertEqual(agent_moved.site.id, site_to_move.id) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@@ -233,7 +396,7 @@ class TestClientViews(TacticalTestCase):
|
||||
r = self.client.get(url, format="json")
|
||||
serializer = ClientTreeSerializer(clients, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -245,7 +408,7 @@ class TestClientViews(TacticalTestCase):
|
||||
r = self.client.get(url)
|
||||
serializer = DeploymentSerializer(deployments, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -255,8 +418,8 @@ class TestClientViews(TacticalTestCase):
|
||||
|
||||
url = "/clients/deployments/"
|
||||
payload = {
|
||||
"client": site.client.id,
|
||||
"site": site.id,
|
||||
"client": site.client.id, # type: ignore
|
||||
"site": site.id, # type: ignore
|
||||
"expires": "2037-11-23 18:53",
|
||||
"power": 1,
|
||||
"ping": 0,
|
||||
@@ -284,10 +447,10 @@ class TestClientViews(TacticalTestCase):
|
||||
|
||||
url = "/clients/deployments/"
|
||||
|
||||
url = f"/clients/{deployment.id}/deployment/"
|
||||
url = f"/clients/{deployment.id}/deployment/" # type: ignore
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertFalse(Deployment.objects.filter(pk=deployment.id).exists())
|
||||
self.assertFalse(Deployment.objects.filter(pk=deployment.id).exists()) # type: ignore
|
||||
|
||||
url = "/clients/32348/deployment/"
|
||||
r = self.client.delete(url)
|
||||
@@ -301,7 +464,7 @@ class TestClientViews(TacticalTestCase):
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.data, "invalid")
|
||||
self.assertEqual(r.data, "invalid") # type: ignore
|
||||
|
||||
uid = uuid.uuid4()
|
||||
url = f"/clients/{uid}/deploy/"
|
||||
|
||||
@@ -4,10 +4,12 @@ from . import views

urlpatterns = [
    path("clients/", views.GetAddClients.as_view()),
    path("<int:pk>/client/", views.GetUpdateDeleteClient.as_view()),
    path("<int:pk>/client/", views.GetUpdateClient.as_view()),
    path("<int:pk>/<int:sitepk>/", views.DeleteClient.as_view()),
    path("tree/", views.GetClientTree.as_view()),
    path("sites/", views.GetAddSites.as_view()),
    path("<int:pk>/site/", views.GetUpdateDeleteSite.as_view()),
    path("sites/<int:pk>/", views.GetUpdateSite.as_view()),
    path("sites/<int:pk>/<int:sitepk>/", views.DeleteSite.as_view()),
    path("deployments/", views.AgentDeployment.as_view()),
    path("<int:pk>/deployment/", views.AgentDeployment.as_view()),
    path("<str:uid>/deploy/", views.GenerateAgent.as_view()),
@@ -6,22 +6,27 @@ import pytz
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CoreSettings
|
||||
from tacticalrmm.utils import generate_installer_exe, notify_error
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import Client, Deployment, Site
|
||||
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
|
||||
from .serializers import (
|
||||
ClientCustomFieldSerializer,
|
||||
ClientSerializer,
|
||||
ClientTreeSerializer,
|
||||
DeploymentSerializer,
|
||||
SiteCustomFieldSerializer,
|
||||
SiteSerializer,
|
||||
)
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class GetAddClients(APIView):
|
||||
def get(self, request):
|
||||
@@ -29,45 +34,99 @@ class GetAddClients(APIView):
|
||||
return Response(ClientSerializer(clients, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
# create client
|
||||
client_serializer = ClientSerializer(data=request.data["client"])
|
||||
client_serializer.is_valid(raise_exception=True)
|
||||
client = client_serializer.save()
|
||||
|
||||
if "initialsetup" in request.data:
|
||||
client = {"name": request.data["client"]["client"].strip()}
|
||||
site = {"name": request.data["client"]["site"].strip()}
|
||||
serializer = ClientSerializer(data=client, context=request.data["client"])
|
||||
serializer.is_valid(raise_exception=True)
|
||||
# create site
|
||||
site_serializer = SiteSerializer(
|
||||
data={"client": client.id, "name": request.data["site"]["name"]}
|
||||
)
|
||||
|
||||
# make sure site serializer doesn't return errors and save
|
||||
if site_serializer.is_valid():
|
||||
site_serializer.save()
|
||||
else:
|
||||
# delete client since site serializer was invalid
|
||||
client.delete()
|
||||
site_serializer.is_valid(raise_exception=True)
|
||||
|
||||
if "initialsetup" in request.data.keys():
|
||||
core = CoreSettings.objects.first()
|
||||
core.default_time_zone = request.data["timezone"]
|
||||
core.save(update_fields=["default_time_zone"])
|
||||
else:
|
||||
client = {"name": request.data["client"].strip()}
|
||||
site = {"name": request.data["site"].strip()}
|
||||
serializer = ClientSerializer(data=client, context=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
obj = serializer.save()
|
||||
Site(client=obj, name=site["name"]).save()
|
||||
# save custom fields
|
||||
if "custom_fields" in request.data.keys():
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
return Response(f"{obj} was added!")
|
||||
custom_field = field
|
||||
custom_field["client"] = client.id
|
||||
|
||||
serializer = ClientCustomFieldSerializer(data=custom_field)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response(f"{client} was added!")
|
||||
|
||||
|
||||
class GetUpdateDeleteClient(APIView):
|
||||
class GetUpdateClient(APIView):
|
||||
def get(self, request, pk):
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
return Response(ClientSerializer(client).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
|
||||
serializer = ClientSerializer(data=request.data, instance=client, partial=True)
|
||||
serializer = ClientSerializer(
|
||||
data=request.data["client"], instance=client, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("The Client was renamed")
|
||||
# update custom fields
|
||||
if "custom_fields" in request.data.keys():
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
custom_field = field
|
||||
custom_field["client"] = pk
|
||||
|
||||
if ClientCustomField.objects.filter(field=field["field"], client=pk):
|
||||
value = ClientCustomField.objects.get(
|
||||
field=field["field"], client=pk
|
||||
)
|
||||
serializer = ClientCustomFieldSerializer(
|
||||
instance=value, data=custom_field
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
else:
|
||||
serializer = ClientCustomFieldSerializer(data=custom_field)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("The Client was updated")
|
||||
|
||||
|
||||
class DeleteClient(APIView):
|
||||
def delete(self, request, pk, sitepk):
|
||||
from automation.tasks import generate_all_agent_checks_task
|
||||
|
||||
def delete(self, request, pk):
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
agent_count = Agent.objects.filter(site__client=client).count()
|
||||
if agent_count > 0:
|
||||
agents = Agent.objects.filter(site__client=client)
|
||||
|
||||
if not sitepk:
|
||||
return notify_error(
|
||||
f"Cannot delete {client} while {agent_count} agents exist in it. Move the agents to another client first."
|
||||
"There needs to be a site specified to move existing agents to"
|
||||
)
|
||||
|
||||
site = get_object_or_404(Site, pk=sitepk)
|
||||
agents.update(site=site)
|
||||
|
||||
generate_all_agent_checks_task.delay("workstation", create_tasks=True)
|
||||
generate_all_agent_checks_task.delay("server", create_tasks=True)
|
||||
|
||||
client.delete()
|
||||
return Response(f"{client.name} was deleted!")
|
||||
|
||||
@@ -84,39 +143,90 @@ class GetAddSites(APIView):
|
||||
return Response(SiteSerializer(sites, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
name = request.data["name"].strip()
|
||||
serializer = SiteSerializer(data=request.data["site"])
|
||||
serializer.is_valid(raise_exception=True)
|
||||
site = serializer.save()
|
||||
|
||||
# save custom fields
|
||||
if "custom_fields" in request.data.keys():
|
||||
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
custom_field = field
|
||||
custom_field["site"] = site.id
|
||||
|
||||
serializer = SiteCustomFieldSerializer(data=custom_field)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response(f"Site {site.name} was added!")
|
||||
|
||||
|
||||
class GetUpdateSite(APIView):
|
||||
def get(self, request, pk):
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
return Response(SiteSerializer(site).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
|
||||
if "client" in request.data["site"].keys() and (
|
||||
site.client.id != request.data["site"]["client"]
|
||||
and site.client.sites.count() == 1
|
||||
):
|
||||
return notify_error("A client must have at least one site")
|
||||
|
||||
serializer = SiteSerializer(
|
||||
data={"name": name, "client": request.data["client"]},
|
||||
context={"clientpk": request.data["client"]},
|
||||
instance=site, data=request.data["site"], partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
# update custom field
|
||||
if "custom_fields" in request.data.keys():
|
||||
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
custom_field = field
|
||||
custom_field["site"] = pk
|
||||
|
||||
if SiteCustomField.objects.filter(field=field["field"], site=pk):
|
||||
value = SiteCustomField.objects.get(field=field["field"], site=pk)
|
||||
serializer = SiteCustomFieldSerializer(
|
||||
instance=value, data=custom_field, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
else:
|
||||
serializer = SiteCustomFieldSerializer(data=custom_field)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("Site was edited!")
|
||||
|
||||
|
||||
class GetUpdateDeleteSite(APIView):
|
||||
def put(self, request, pk):
|
||||
class DeleteSite(APIView):
|
||||
def delete(self, request, pk, sitepk):
|
||||
from automation.tasks import generate_all_agent_checks_task
|
||||
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
serializer = SiteSerializer(instance=site, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def delete(self, request, pk):
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
if site.client.sites.count() == 1:
|
||||
return notify_error(f"A client must have at least 1 site.")
|
||||
return notify_error("A client must have at least 1 site.")
|
||||
|
||||
agent_count = Agent.objects.filter(site=site).count()
|
||||
agents = Agent.objects.filter(site=site)
|
||||
|
||||
if agent_count > 0:
|
||||
if not sitepk:
|
||||
return notify_error(
|
||||
f"Cannot delete {site.name} while {agent_count} agents exist in it. Move the agents to another site first."
|
||||
"There needs to be a site specified to move the agents to"
|
||||
)
|
||||
|
||||
agent_site = get_object_or_404(Site, pk=sitepk)
|
||||
|
||||
agents.update(site=agent_site)
|
||||
|
||||
generate_all_agent_checks_task.delay("workstation", create_tasks=True)
|
||||
generate_all_agent_checks_task.delay("server", create_tasks=True)
|
||||
|
||||
site.delete()
|
||||
return Response(f"{site.name} was deleted!")
|
||||
|
||||
@@ -173,6 +283,8 @@ class GenerateAgent(APIView):
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
def get(self, request, uid):
|
||||
from tacticalrmm.utils import generate_winagent_exe
|
||||
|
||||
try:
|
||||
_ = uuid.UUID(uid, version=4)
|
||||
except ValueError:
|
||||
@@ -180,28 +292,22 @@ class GenerateAgent(APIView):
|
||||
|
||||
d = get_object_or_404(Deployment, uid=uid)
|
||||
|
||||
inno = (
|
||||
f"winagent-v{settings.LATEST_AGENT_VER}.exe"
|
||||
if d.arch == "64"
|
||||
else f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe"
|
||||
)
|
||||
client = d.client.name.replace(" ", "").lower()
|
||||
site = d.site.name.replace(" ", "").lower()
|
||||
client = re.sub(r"([^a-zA-Z0-9]+)", "", client)
|
||||
site = re.sub(r"([^a-zA-Z0-9]+)", "", site)
|
||||
ext = ".exe" if d.arch == "64" else "-x86.exe"
|
||||
file_name = f"rmm-{client}-{site}-{d.mon_type}{ext}"
|
||||
|
||||
return generate_installer_exe(
|
||||
file_name=f"rmm-{client}-{site}-{d.mon_type}{ext}",
|
||||
goarch="amd64" if d.arch == "64" else "386",
|
||||
inno=inno,
|
||||
api=f"https://{request.get_host()}",
|
||||
client_id=d.client.pk,
|
||||
site_id=d.site.pk,
|
||||
atype=d.mon_type,
|
||||
return generate_winagent_exe(
|
||||
client=d.client.pk,
|
||||
site=d.site.pk,
|
||||
agent_type=d.mon_type,
|
||||
rdp=d.install_flags["rdp"],
|
||||
ping=d.install_flags["ping"],
|
||||
power=d.install_flags["power"],
|
||||
download_url=settings.DL_64 if d.arch == "64" else settings.DL_32,
|
||||
arch=d.arch,
|
||||
token=d.token_key,
|
||||
api=f"https://{request.get_host()}",
|
||||
file_name=file_name,
|
||||
)
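The installer filename is now derived from the client and site names. A small, standalone restatement of that sanitisation, runnable on its own (the helper name is invented for illustration):

import re

def installer_name(client: str, site: str, mon_type: str, arch: str) -> str:
    """Mirror the filename construction used by GenerateAgent above."""
    client = re.sub(r"([^a-zA-Z0-9]+)", "", client.replace(" ", "").lower())
    site = re.sub(r"([^a-zA-Z0-9]+)", "", site.replace(" ", "").lower())
    ext = ".exe" if arch == "64" else "-x86.exe"
    return f"rmm-{client}-{site}-{mon_type}{ext}"

print(installer_name("Acme Corp!", "HQ #1", "server", "64"))
# -> rmm-acmecorp-hq1-server.exe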
|
||||
|
||||
@@ -1,5 +1,6 @@
from django.contrib import admin

from .models import CoreSettings
from .models import CoreSettings, CustomField

admin.site.register(CoreSettings)
admin.site.register(CustomField)
api/tacticalrmm/core/consumers.py — new file (79 lines)
@@ -0,0 +1,79 @@
|
||||
import asyncio
|
||||
|
||||
from channels.db import database_sync_to_async
|
||||
from channels.generic.websocket import AsyncJsonWebsocketConsumer
|
||||
from django.contrib.auth.models import AnonymousUser
|
||||
|
||||
from agents.models import Agent
|
||||
|
||||
|
||||
class DashInfo(AsyncJsonWebsocketConsumer):
|
||||
async def connect(self):
|
||||
|
||||
self.user = self.scope["user"]
|
||||
|
||||
if isinstance(self.user, AnonymousUser):
|
||||
await self.close()
|
||||
|
||||
await self.accept()
|
||||
self.connected = True
|
||||
self.dash_info = asyncio.create_task(self.send_dash_info())
|
||||
|
||||
async def disconnect(self, close_code):
|
||||
|
||||
try:
|
||||
self.dash_info.cancel()
|
||||
except:
|
||||
pass
|
||||
|
||||
self.connected = False
|
||||
await self.close()
|
||||
|
||||
async def receive(self, json_data=None):
|
||||
pass
|
||||
|
||||
@database_sync_to_async
|
||||
def get_dashboard_info(self):
|
||||
server_offline_count = len(
|
||||
[
|
||||
agent
|
||||
for agent in Agent.objects.filter(monitoring_type="server").only(
|
||||
"pk",
|
||||
"last_seen",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
)
|
||||
if not agent.status == "online"
|
||||
]
|
||||
)
|
||||
|
||||
workstation_offline_count = len(
|
||||
[
|
||||
agent
|
||||
for agent in Agent.objects.filter(monitoring_type="workstation").only(
|
||||
"pk",
|
||||
"last_seen",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
)
|
||||
if not agent.status == "online"
|
||||
]
|
||||
)
|
||||
|
||||
ret = {
|
||||
"total_server_offline_count": server_offline_count,
|
||||
"total_workstation_offline_count": workstation_offline_count,
|
||||
"total_server_count": Agent.objects.filter(
|
||||
monitoring_type="server"
|
||||
).count(),
|
||||
"total_workstation_count": Agent.objects.filter(
|
||||
monitoring_type="workstation"
|
||||
).count(),
|
||||
}
|
||||
return ret
|
||||
|
||||
async def send_dash_info(self):
|
||||
while self.connected:
|
||||
c = await self.get_dashboard_info()
|
||||
await self.send_json(c)
|
||||
await asyncio.sleep(30)
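A sketch of a consumer-side client for this socket. The /ws/dashinfo/ path and the access_token query parameter are taken from the test added later in this changeset; the host and the third-party `websockets` package are assumptions.

# Illustrative client only -- host and the `websockets` dependency are assumptions.
import asyncio
import json

import websockets  # pip install websockets

async def watch_dashboard(token: str) -> None:
    uri = f"wss://rmm.example.com/ws/dashinfo/?access_token={token}"
    async with websockets.connect(uri) as ws:
        # DashInfo pushes a fresh summary roughly every 30 seconds.
        async for message in ws:
            info = json.loads(message)
            print(info["total_server_count"], info["total_server_offline_count"],
                  info["total_workstation_count"], info["total_workstation_offline_count"])

# asyncio.run(watch_dashboard("<api-token>"))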
|
||||
Binary file not shown.
@@ -1,5 +0,0 @@
|
||||
module github.com/wh1te909/goinstaller
|
||||
|
||||
go 1.16
|
||||
|
||||
require github.com/josephspurrier/goversioninfo v1.2.0 // indirect
|
||||
@@ -1,10 +0,0 @@
|
||||
github.com/akavel/rsrc v0.8.0 h1:zjWn7ukO9Kc5Q62DOJCcxGpXC18RawVtYAGdz2aLlfw=
|
||||
github.com/akavel/rsrc v0.8.0/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/josephspurrier/goversioninfo v1.2.0 h1:tpLHXAxLHKHg/dCU2AAYx08A4m+v9/CWg6+WUvTF4uQ=
|
||||
github.com/josephspurrier/goversioninfo v1.2.0/go.mod h1:AGP2a+Y/OVJZ+s6XM4IwFUpkETwvn0orYurY8qpw1+0=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
@@ -1,17 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
|
||||
<assemblyIdentity
|
||||
type="win32"
|
||||
name="TacticalRMMInstaller"
|
||||
version="1.0.0.0"
|
||||
processorArchitecture="*"/>
|
||||
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
|
||||
<security>
|
||||
<requestedPrivileges>
|
||||
<requestedExecutionLevel
|
||||
level="requireAdministrator"
|
||||
uiAccess="false"/>
|
||||
</requestedPrivileges>
|
||||
</security>
|
||||
</trustInfo>
|
||||
</assembly>
|
||||
@@ -1,186 +0,0 @@
|
||||
//go:generate goversioninfo -icon=onit.ico -manifest=goversioninfo.exe.manifest -gofile=versioninfo.go
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"net"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var (
|
||||
Inno string
|
||||
Api string
|
||||
Client string
|
||||
Site string
|
||||
Atype string
|
||||
Power string
|
||||
Rdp string
|
||||
Ping string
|
||||
Token string
|
||||
DownloadUrl string
|
||||
)
|
||||
|
||||
var netTransport = &http.Transport{
|
||||
Dial: (&net.Dialer{
|
||||
Timeout: 5 * time.Second,
|
||||
}).Dial,
|
||||
TLSHandshakeTimeout: 5 * time.Second,
|
||||
}
|
||||
|
||||
var netClient = &http.Client{
|
||||
Timeout: time.Second * 900,
|
||||
Transport: netTransport,
|
||||
}
|
||||
|
||||
func downloadAgent(filepath string) (err error) {
|
||||
|
||||
out, err := os.Create(filepath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer out.Close()
|
||||
|
||||
resp, err := netClient.Get(DownloadUrl)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("Bad response: %s", resp.Status)
|
||||
}
|
||||
|
||||
_, err = io.Copy(out, resp.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
|
||||
debugLog := flag.String("log", "", "Verbose output")
|
||||
localMesh := flag.String("local-mesh", "", "Use local mesh agent")
|
||||
silent := flag.Bool("silent", false, "Do not popup any message boxes during installation")
|
||||
cert := flag.String("cert", "", "Path to ca.pem")
|
||||
flag.Parse()
|
||||
|
||||
var debug bool = false
|
||||
|
||||
if strings.TrimSpace(strings.ToLower(*debugLog)) == "debug" {
|
||||
debug = true
|
||||
}
|
||||
|
||||
agentBinary := filepath.Join(os.Getenv("windir"), "Temp", Inno)
|
||||
tacrmm := filepath.Join(os.Getenv("PROGRAMFILES"), "TacticalAgent", "tacticalrmm.exe")
|
||||
|
||||
cmdArgs := []string{
|
||||
"-m", "install", "--api", Api, "--client-id",
|
||||
Client, "--site-id", Site, "--agent-type", Atype,
|
||||
"--auth", Token,
|
||||
}
|
||||
|
||||
if debug {
|
||||
cmdArgs = append(cmdArgs, "-log", "debug")
|
||||
}
|
||||
|
||||
if *silent {
|
||||
cmdArgs = append(cmdArgs, "-silent")
|
||||
}
|
||||
|
||||
if len(strings.TrimSpace(*localMesh)) != 0 {
|
||||
cmdArgs = append(cmdArgs, "-local-mesh", *localMesh)
|
||||
}
|
||||
|
||||
if len(strings.TrimSpace(*cert)) != 0 {
|
||||
cmdArgs = append(cmdArgs, "-cert", *cert)
|
||||
}
|
||||
|
||||
if Rdp == "1" {
|
||||
cmdArgs = append(cmdArgs, "-rdp")
|
||||
}
|
||||
|
||||
if Ping == "1" {
|
||||
cmdArgs = append(cmdArgs, "-ping")
|
||||
}
|
||||
|
||||
if Power == "1" {
|
||||
cmdArgs = append(cmdArgs, "-power")
|
||||
}
|
||||
|
||||
if debug {
|
||||
fmt.Println("Installer:", agentBinary)
|
||||
fmt.Println("Tactical Agent:", tacrmm)
|
||||
fmt.Println("Download URL:", DownloadUrl)
|
||||
fmt.Println("Install command:", tacrmm, strings.Join(cmdArgs, " "))
|
||||
}
|
||||
|
||||
fmt.Println("Downloading agent...")
|
||||
dl := downloadAgent(agentBinary)
|
||||
if dl != nil {
|
||||
fmt.Println("ERROR: unable to download agent from", DownloadUrl)
|
||||
fmt.Println(dl)
|
||||
os.Exit(1)
|
||||
}
|
||||
defer os.Remove(agentBinary)
|
||||
|
||||
fmt.Println("Extracting files...")
|
||||
winagentCmd := exec.Command(agentBinary, "/VERYSILENT", "/SUPPRESSMSGBOXES")
|
||||
err := winagentCmd.Run()
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
time.Sleep(5 * time.Second)
|
||||
|
||||
fmt.Println("Installation starting.")
|
||||
cmd := exec.Command(tacrmm, cmdArgs...)
|
||||
|
||||
cmdReader, err := cmd.StdoutPipe()
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
return
|
||||
}
|
||||
|
||||
cmdErrReader, oerr := cmd.StderrPipe()
|
||||
if oerr != nil {
|
||||
fmt.Fprintln(os.Stderr, oerr)
|
||||
return
|
||||
}
|
||||
|
||||
scanner := bufio.NewScanner(cmdReader)
|
||||
escanner := bufio.NewScanner(cmdErrReader)
|
||||
go func() {
|
||||
for scanner.Scan() {
|
||||
fmt.Println(scanner.Text())
|
||||
}
|
||||
}()
|
||||
|
||||
go func() {
|
||||
for escanner.Scan() {
|
||||
fmt.Println(escanner.Text())
|
||||
}
|
||||
}()
|
||||
|
||||
err = cmd.Start()
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
return
|
||||
}
|
||||
|
||||
err = cmd.Wait()
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
return
|
||||
}
|
||||
}
|
||||
Binary file not shown.
(deleted image file, 48 KiB)
@@ -1,43 +0,0 @@
|
||||
{
|
||||
"FixedFileInfo": {
|
||||
"FileVersion": {
|
||||
"Major": 1,
|
||||
"Minor": 0,
|
||||
"Patch": 0,
|
||||
"Build": 0
|
||||
},
|
||||
"ProductVersion": {
|
||||
"Major": 1,
|
||||
"Minor": 0,
|
||||
"Patch": 0,
|
||||
"Build": 0
|
||||
},
|
||||
"FileFlagsMask": "3f",
|
||||
"FileFlags ": "00",
|
||||
"FileOS": "040004",
|
||||
"FileType": "01",
|
||||
"FileSubType": "00"
|
||||
},
|
||||
"StringFileInfo": {
|
||||
"Comments": "",
|
||||
"CompanyName": "Tactical Techs",
|
||||
"FileDescription": "Tactical RMM Installer",
|
||||
"FileVersion": "v1.0.0.0",
|
||||
"InternalName": "rmm.exe",
|
||||
"LegalCopyright": "Copyright (c) 2020 Tactical Techs",
|
||||
"LegalTrademarks": "",
|
||||
"OriginalFilename": "installer.go",
|
||||
"PrivateBuild": "",
|
||||
"ProductName": "Tactical RMM Installer",
|
||||
"ProductVersion": "v1.0.0.0",
|
||||
"SpecialBuild": ""
|
||||
},
|
||||
"VarFileInfo": {
|
||||
"Translation": {
|
||||
"LangID": "0409",
|
||||
"CharsetID": "04B0"
|
||||
}
|
||||
},
|
||||
"IconPath": "",
|
||||
"ManifestPath": ""
|
||||
}
|
||||
@@ -10,6 +10,8 @@ $ping = pingchange
|
||||
$auth = '"tokenchange"'
|
||||
$downloadlink = 'downloadchange'
|
||||
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
|
||||
|
||||
$serviceName = 'tacticalagent'
|
||||
If (Get-Service $serviceName -ErrorAction SilentlyContinue) {
|
||||
write-host ('Tactical RMM Is Already Installed')
|
||||
|
||||
api/tacticalrmm/core/migrations/0014_customfield.py — new file (27 lines)
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-17 14:45
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0013_coresettings_alert_template'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='CustomField',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('order', models.PositiveIntegerField()),
|
||||
('model', models.CharField(choices=[('client', 'Client'), ('site', 'Site'), ('agent', 'Agent')], max_length=25)),
|
||||
('type', models.CharField(choices=[('text', 'Text'), ('number', 'Number'), ('single', 'Single'), ('multiple', 'Multiple'), ('checkbox', 'Checkbox'), ('datetime', 'DateTime')], default='text', max_length=25)),
|
||||
('options', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)),
|
||||
('name', models.TextField(blank=True, null=True)),
|
||||
('default_value', models.TextField(blank=True, null=True)),
|
||||
('required', models.BooleanField(blank=True, default=False)),
|
||||
],
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/core/migrations/0015_auto_20210318_2034.py — new file (18 lines)
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-18 20:34
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0014_customfield'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='customfield',
|
||||
name='order',
|
||||
field=models.PositiveIntegerField(default=0),
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/core/migrations/0016_auto_20210319_1536.py — new file (17 lines)
@@ -0,0 +1,17 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-19 15:36
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0015_auto_20210318_2034'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name='customfield',
|
||||
unique_together={('model', 'name')},
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/core/migrations/0017_auto_20210329_1050.py — new file (24 lines)
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 10:50
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0016_auto_20210319_1536'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='customfield',
|
||||
name='checkbox_value',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='customfield',
|
||||
name='default_values_multiple',
|
||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None),
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/core/migrations/0018_auto_20210329_1709.py — new file (23 lines)
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 17:09
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0017_auto_20210329_1050'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name='customfield',
|
||||
old_name='checkbox_value',
|
||||
new_name='default_value_bool',
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name='customfield',
|
||||
old_name='default_value',
|
||||
new_name='default_value_string',
|
||||
),
|
||||
]
|
||||
@@ -216,3 +216,53 @@ class CoreSettings(BaseAuditModel):
        from .serializers import CoreSerializer

        return CoreSerializer(core).data


FIELD_TYPE_CHOICES = (
    ("text", "Text"),
    ("number", "Number"),
    ("single", "Single"),
    ("multiple", "Multiple"),
    ("checkbox", "Checkbox"),
    ("datetime", "DateTime"),
)

MODEL_CHOICES = (("client", "Client"), ("site", "Site"), ("agent", "Agent"))


class CustomField(models.Model):

    order = models.PositiveIntegerField(default=0)
    model = models.CharField(max_length=25, choices=MODEL_CHOICES)
    type = models.CharField(max_length=25, choices=FIELD_TYPE_CHOICES, default="text")
    options = ArrayField(
        models.CharField(max_length=255, null=True, blank=True),
        null=True,
        blank=True,
        default=list,
    )
    name = models.TextField(null=True, blank=True)
    required = models.BooleanField(blank=True, default=False)
    default_value_string = models.TextField(null=True, blank=True)
    default_value_bool = models.BooleanField(default=False)
    default_values_multiple = ArrayField(
        models.CharField(max_length=255, null=True, blank=True),
        null=True,
        blank=True,
        default=list,
    )

    class Meta:
        unique_together = (("model", "name"),)

    def __str__(self):
        return self.name

    @property
    def default_value(self):
        if self.type == "multiple":
            return self.default_values_multiple
        elif self.type == "checkbox":
            return self.default_value_bool
        else:
            return self.default_value_string
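A quick illustration of how the typed default columns collapse into a single value through the default_value property above (an editor's sketch meant for the project's Django shell; the field instance is never saved):

from core.models import CustomField

f = CustomField(model="agent", name="Backup enabled", type="checkbox",
                default_value_bool=True)
print(f.default_value)                 # True   (checkbox -> default_value_bool)

f.type, f.default_values_multiple = "multiple", ["one", "two"]
print(f.default_value)                 # ["one", "two"]

f.type, f.default_value_string = "text", "n/a"
print(f.default_value)                 # "n/a"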
@@ -1,7 +1,7 @@
import pytz
from rest_framework import serializers

from .models import CoreSettings
from .models import CoreSettings, CustomField


class CoreSettingsSerializer(serializers.ModelSerializer):

@@ -21,3 +21,9 @@ class CoreSerializer(serializers.ModelSerializer):
    class Meta:
        model = CoreSettings
        fields = "__all__"


class CustomFieldSerializer(serializers.ModelSerializer):
    class Meta:
        model = CustomField
        fields = "__all__"
@@ -1,11 +1,39 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from model_bakery import baker, seq
|
||||
from channels.db import database_sync_to_async
|
||||
from channels.testing import WebsocketCommunicator
|
||||
from model_bakery import baker
|
||||
|
||||
from core.models import CoreSettings
|
||||
from core.tasks import core_maintenance_tasks
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .consumers import DashInfo
|
||||
from .models import CoreSettings, CustomField
|
||||
from .serializers import CustomFieldSerializer
|
||||
from .tasks import core_maintenance_tasks
|
||||
|
||||
|
||||
class TestConsumers(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
self.authenticate()
|
||||
|
||||
@database_sync_to_async
|
||||
def get_token(self):
|
||||
from rest_framework.authtoken.models import Token
|
||||
|
||||
token = Token.objects.create(user=self.john)
|
||||
return token.key
|
||||
|
||||
async def test_dash_info(self):
|
||||
key = self.get_token()
|
||||
communicator = WebsocketCommunicator(
|
||||
DashInfo.as_asgi(), f"/ws/dashinfo/?access_token={key}"
|
||||
)
|
||||
communicator.scope["user"] = self.john
|
||||
connected, _ = await communicator.connect()
|
||||
assert connected
|
||||
await communicator.disconnect()
|
||||
|
||||
|
||||
class TestCoreTasks(TacticalTestCase):
|
||||
def setUp(self):
|
||||
@@ -42,7 +70,7 @@ class TestCoreTasks(TacticalTestCase):
|
||||
url = "/core/editsettings/"
|
||||
|
||||
# setup
|
||||
policies = baker.make("Policy", _quantity=2)
|
||||
policies = baker.make("automation.Policy", _quantity=2)
|
||||
# test normal request
|
||||
data = {
|
||||
"smtp_from_email": "newexample@example.com",
|
||||
@@ -59,14 +87,14 @@ class TestCoreTasks(TacticalTestCase):
|
||||
|
||||
# test adding policy
|
||||
data = {
|
||||
"workstation_policy": policies[0].id,
|
||||
"server_policy": policies[1].id,
|
||||
"workstation_policy": policies[0].id, # type: ignore
|
||||
"server_policy": policies[1].id, # type: ignore
|
||||
}
|
||||
r = self.client.patch(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(CoreSettings.objects.first().server_policy.id, policies[1].id)
|
||||
self.assertEqual(CoreSettings.objects.first().server_policy.id, policies[1].id) # type: ignore
|
||||
self.assertEqual(
|
||||
CoreSettings.objects.first().workstation_policy.id, policies[0].id
|
||||
CoreSettings.objects.first().workstation_policy.id, policies[0].id # type: ignore
|
||||
)
|
||||
|
||||
self.assertEqual(generate_all_agent_checks_task.call_count, 2)
|
||||
@@ -128,3 +156,97 @@ class TestCoreTasks(TacticalTestCase):
|
||||
remove_orphaned_win_tasks.assert_called()
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_get_custom_fields(self):
|
||||
url = "/core/customfields/"
|
||||
|
||||
# setup
|
||||
custom_fields = baker.make("core.CustomField", _quantity=2)
|
||||
|
||||
r = self.client.get(url)
|
||||
serializer = CustomFieldSerializer(custom_fields, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(len(r.data), 2) # type: ignore
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_custom_fields_by_model(self):
|
||||
url = "/core/customfields/"
|
||||
|
||||
# setup
|
||||
custom_fields = baker.make("core.CustomField", model="agent", _quantity=5)
|
||||
baker.make("core.CustomField", model="client", _quantity=5)
|
||||
|
||||
# will error if request invalid
|
||||
r = self.client.patch(url, {"invalid": ""})
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
data = {"model": "agent"}
|
||||
r = self.client.patch(url, data)
|
||||
serializer = CustomFieldSerializer(custom_fields, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(len(r.data), 5) # type: ignore
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_add_custom_field(self):
|
||||
url = "/core/customfields/"
|
||||
|
||||
data = {"model": "client", "type": "text", "name": "Field"}
|
||||
r = self.client.patch(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_get_custom_field(self):
|
||||
# setup
|
||||
custom_field = baker.make("core.CustomField")
|
||||
|
||||
# test not found
|
||||
r = self.client.get("/core/customfields/500/")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/core/customfields/{custom_field.id}/" # type: ignore
|
||||
r = self.client.get(url)
|
||||
serializer = CustomFieldSerializer(custom_field)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_update_custom_field(self):
|
||||
# setup
|
||||
custom_field = baker.make("core.CustomField")
|
||||
|
||||
# test not found
|
||||
r = self.client.put("/core/customfields/500/")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/core/customfields/{custom_field.id}/" # type: ignore
|
||||
data = {"type": "single", "options": ["ione", "two", "three"]}
|
||||
r = self.client.put(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
new_field = CustomField.objects.get(pk=custom_field.id) # type: ignore
|
||||
self.assertEqual(new_field.type, data["type"])
|
||||
self.assertEqual(new_field.options, data["options"])
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_delete_custom_field(self):
|
||||
# setup
|
||||
custom_field = baker.make("core.CustomField")
|
||||
|
||||
# test not found
|
||||
r = self.client.delete("/core/customfields/500/")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/core/customfields/{custom_field.id}/" # type: ignore
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.assertFalse(CustomField.objects.filter(pk=custom_field.id).exists()) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@@ -10,4 +10,6 @@ urlpatterns = [
    path("emailtest/", views.email_test),
    path("dashinfo/", views.dashboard_info),
    path("servermaintenance/", views.server_maintenance),
    path("customfields/", views.GetAddCustomFields.as_view()),
    path("customfields/<int:pk>/", views.GetUpdateDeleteCustomFields.as_view()),
]
@@ -1,6 +1,7 @@
|
||||
import os
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.exceptions import ParseError
|
||||
@@ -10,8 +11,8 @@ from rest_framework.views import APIView
|
||||
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import CoreSettings
|
||||
from .serializers import CoreSettingsSerializer
|
||||
from .models import CoreSettings, CustomField
|
||||
from .serializers import CoreSettingsSerializer, CustomFieldSerializer
|
||||
|
||||
|
||||
class UploadMeshAgent(APIView):
|
||||
@@ -133,3 +134,46 @@ def server_maintenance(request):
|
||||
return Response(f"{records_count} records were pruned from the database")
|
||||
|
||||
return notify_error("The data is incorrect")
|
||||
|
||||
|
||||
class GetAddCustomFields(APIView):
|
||||
def get(self, request):
|
||||
fields = CustomField.objects.all()
|
||||
return Response(CustomFieldSerializer(fields, many=True).data)
|
||||
|
||||
def patch(self, request):
|
||||
if "model" in request.data.keys():
|
||||
fields = CustomField.objects.filter(model=request.data["model"])
|
||||
return Response(CustomFieldSerializer(fields, many=True).data)
|
||||
else:
|
||||
return notify_error("The request was invalid")
|
||||
|
||||
def post(self, request):
|
||||
serializer = CustomFieldSerializer(data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class GetUpdateDeleteCustomFields(APIView):
|
||||
def get(self, request, pk):
|
||||
custom_field = get_object_or_404(CustomField, pk=pk)
|
||||
|
||||
return Response(CustomFieldSerializer(custom_field).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
custom_field = get_object_or_404(CustomField, pk=pk)
|
||||
|
||||
serializer = CustomFieldSerializer(
|
||||
instance=custom_field, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def delete(self, request, pk):
|
||||
get_object_or_404(CustomField, pk=pk).delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
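For reference, a minimal sketch of how these new custom field endpoints might be exercised with an authenticated DRF test client, matching the urlpatterns above. The payload values ("agent", "Department", "text") and the "id" key in the serialized output are illustrative assumptions, not taken from this diff.

# create a field, then list/filter/update/delete it
r = client.post("/core/customfields/", {"model": "agent", "name": "Department", "type": "text"})
assert r.status_code == 200

all_fields = client.get("/core/customfields/").json()
agent_fields = client.patch("/core/customfields/", {"model": "agent"}).json()  # filtered by model

pk = agent_fields[0]["id"]  # "id" assumed to be in CustomFieldSerializer output
client.put(f"/core/customfields/{pk}/", {"type": "single", "options": ["one", "two"]})
client.delete(f"/core/customfields/{pk}/")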
@@ -1,5 +0,0 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class NatsapiConfig(AppConfig):
|
||||
name = "natsapi"
|
||||
@@ -1,36 +0,0 @@
|
||||
from django.conf import settings
|
||||
from model_bakery import baker
|
||||
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
|
||||
class TestNatsAPIViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_nats_agents(self):
|
||||
baker.make_recipe(
|
||||
"agents.online_agent", version=settings.LATEST_AGENT_VER, _quantity=14
|
||||
)
|
||||
|
||||
baker.make_recipe(
|
||||
"agents.offline_agent", version=settings.LATEST_AGENT_VER, _quantity=6
|
||||
)
|
||||
baker.make_recipe(
|
||||
"agents.overdue_agent", version=settings.LATEST_AGENT_VER, _quantity=5
|
||||
)
|
||||
|
||||
url = "/natsapi/online/agents/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(len(r.json()["agent_ids"]), 14)
|
||||
|
||||
url = "/natsapi/offline/agents/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(len(r.json()["agent_ids"]), 11)
|
||||
|
||||
url = "/natsapi/asdjaksdasd/agents/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
@@ -1,9 +0,0 @@
|
||||
from django.urls import path
|
||||
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
path("natsinfo/", views.nats_info),
|
||||
path("<str:stat>/agents/", views.NatsAgents.as_view()),
|
||||
path("logcrash/", views.LogCrash.as_view()),
|
||||
]
|
||||
@@ -1,60 +0,0 @@
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from rest_framework.decorators import (
|
||||
api_view,
|
||||
authentication_classes,
|
||||
permission_classes,
|
||||
)
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([])
|
||||
@authentication_classes([])
|
||||
def nats_info(request):
|
||||
return Response({"user": "tacticalrmm", "password": settings.SECRET_KEY})
|
||||
|
||||
|
||||
class NatsAgents(APIView):
|
||||
authentication_classes = [] # type: ignore
|
||||
permission_classes = [] # type: ignore
|
||||
|
||||
def get(self, request, stat: str):
|
||||
if stat not in ["online", "offline"]:
|
||||
return notify_error("invalid request")
|
||||
|
||||
ret: list[str] = []
|
||||
agents = Agent.objects.only(
|
||||
"pk", "agent_id", "version", "last_seen", "overdue_time", "offline_time"
|
||||
)
|
||||
if stat == "online":
|
||||
ret = [i.agent_id for i in agents if i.status == "online"]
|
||||
else:
|
||||
ret = [i.agent_id for i in agents if i.status != "online"]
|
||||
|
||||
return Response({"agent_ids": ret})
|
||||
|
||||
|
||||
class LogCrash(APIView):
|
||||
authentication_classes = [] # type: ignore
|
||||
permission_classes = [] # type: ignore
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agentid"])
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save(update_fields=["last_seen"])
|
||||
|
||||
if hasattr(settings, "DEBUGTEST") and settings.DEBUGTEST:
|
||||
logger.info(
|
||||
f"Detected crashed tacticalagent service on {agent.hostname} v{agent.version}, attempting recovery"
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
@@ -1,17 +1,16 @@
|
||||
amqp==5.0.5
|
||||
asgiref==3.3.1
|
||||
asgiref==3.3.4
|
||||
asyncio-nats-client==0.11.4
|
||||
billiard==3.6.3.0
|
||||
celery==5.0.5
|
||||
certifi==2020.12.5
|
||||
cffi==1.14.5
|
||||
channels==3.0.3
|
||||
chardet==4.0.0
|
||||
cryptography==3.4.6
|
||||
decorator==4.4.2
|
||||
Django==3.1.7
|
||||
cryptography==3.4.7
|
||||
daphne==3.0.2
|
||||
Django==3.2.0
|
||||
django-cors-headers==3.7.0
|
||||
django-rest-knox==4.1.0
|
||||
djangorestframework==3.12.2
|
||||
djangorestframework==3.12.4
|
||||
future==0.18.2
|
||||
kombu==5.0.2
|
||||
loguru==0.5.3
|
||||
@@ -28,8 +27,8 @@ redis==3.5.3
|
||||
requests==2.25.1
|
||||
six==1.15.0
|
||||
sqlparse==0.4.1
|
||||
twilio==6.53.0
|
||||
urllib3==1.26.3
|
||||
twilio==6.56.0
|
||||
urllib3==1.26.4
|
||||
uWSGI==2.0.19.1
|
||||
validators==0.18.2
|
||||
vine==5.0.0
|
||||
|
||||
@@ -1,219 +1,401 @@
|
||||
[
|
||||
{
|
||||
"filename": "ClearFirefoxCache.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Clear Firefox Cache",
|
||||
"description": "This script will clean up Mozilla Firefox for all users.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "ClearGoogleChromeCache.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Clear Google Chrome Cache",
|
||||
"description": "This script will clean up Google Chrome for all users.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "InstallAdobeReader.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Install Adobe Reader DC",
|
||||
"description": "Installs Adobe Reader DC.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "InstallDuplicati.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Install Duplicati",
|
||||
"description": "This script installs Duplicati 2.0.5.1 as a service.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "Reset-WindowsUpdate.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Reset Windows Update",
|
||||
"description": "This script will reset all of the Windows Updates components to DEFAULT SETTINGS.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "Start-Cleanup.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Cleanup C: drive",
|
||||
"description": "Cleans the C: drive's Window Temperary files, Windows SoftwareDistribution folder, the local users Temperary folder, IIS logs (if applicable) and empties the recycling bin. All deleted files will go into a log transcript in $env:TEMP. By default this script leaves files that are newer than 7 days old however this variable can be edited.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "WindowsDefenderFullScanBackground.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Windows Defender Full Scan",
|
||||
"description": "Runs a Windows Defender Full background scan.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "WindowsDefenderQuickScanBackground.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Windows Defender Quick Scan",
|
||||
"description": "Runs a Quick Scan using Windows Defender in the Background.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "speedtest.py",
|
||||
"submittedBy": "https://github.com/wh1te909",
|
||||
"name": "Speed Test",
|
||||
"description": "Runs a Speed Test",
|
||||
"shell": "python"
|
||||
},
|
||||
{
|
||||
"filename": "Rename-Installed-App.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "Rename Tactical RMM Agent",
|
||||
"description": "Updates the DisplayName registry entry for the Tactical RMM windows agent to your desired name. This script takes 1 required argument: the name you wish to set.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "bitlocker_encrypted_drive_c.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Check C Drive for Bitlocker Status",
|
||||
"description": "Runs a check on drive C for Bitlocker status.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "bitlocker_create_status_report.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Create Bitlocker Status Report",
|
||||
"description": "Creates a Bitlocker status report.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "bitlocker_retrieve_status_report.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Retreive Bitlocker Status Report",
|
||||
"description": "Retreives a Bitlocker status report.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "bios_check.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Check BIOS Information",
|
||||
"description": "Retreives and reports on BIOS make, version, and date .",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "ResetHighPerformancePowerProfiletoDefaults.ps1",
|
||||
"submittedBy": "https://github.com/azulskyknight",
|
||||
"name": "Reset High Perf Power Profile",
|
||||
"description": "Resets monitor, disk, standby, and hibernate timers in the default High Performance power profile to their default values. It also re-indexes the AC and DC power profiles into their default order.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "SetHighPerformancePowerProfile.ps1",
|
||||
"submittedBy": "https://github.com/azulskyknight",
|
||||
"name": "Set High Perf Power Profile",
|
||||
"description": "Sets the High Performance Power profile to the active power profile. Use this to keep machines from falling asleep.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "Windows10Upgrade.ps1",
|
||||
"submittedBy": "https://github.com/RVL-Solutions and https://github.com/darimm",
|
||||
"name": "Windows 10 Upgrade",
|
||||
"description": "Forces an upgrade to the latest release of Windows 10.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "DiskStatus.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Check Disks",
|
||||
"description": "Checks local disks for errors reported in event viewer within the last 24 hours",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "DuplicatiStatus.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Check Duplicati",
|
||||
"description": "Checks Duplicati Backup is running properly over the last 24 hours",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "EnableDefender.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Enable Windows Defender",
|
||||
"description": "Enables Windows Defender and sets preferences",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "OpenSSHServerInstall.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Install SSH",
|
||||
"description": "Installs and enabled OpenSSH Server",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "RDP_enable.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Enable RDP",
|
||||
"description": "Enables RDP",
|
||||
"shell": "cmd"
|
||||
},
|
||||
{
|
||||
"filename": "Speedtest.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "PS Speed Test",
|
||||
"description": "Powershell speed test (win 10 or server2016+)",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "SyncTime.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Sync DC Time",
|
||||
"description": "Syncs time with domain controller",
|
||||
"shell": "cmd"
|
||||
},
|
||||
{
|
||||
"filename": "WinDefenderClearLogs.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Clear Defender Logs",
|
||||
"description": "Clears Windows Defender Logs",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "WinDefenderStatus.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Defender Status",
|
||||
"description": "This will check for Malware, Antispyware, that Windows Defender is Healthy, last scan etc within the last 24 hours",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "disable_FastStartup.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Disable Fast Startup",
|
||||
"description": "Disables Faststartup on Windows 10",
|
||||
"shell": "cmd"
|
||||
},
|
||||
{
|
||||
"filename": "updatetacticalexclusion.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "TRMM Defender Exclusions",
|
||||
"description": "Windows Defender Exclusions for Tactical RMM",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "Display_Message_To_User.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "Display Message To User",
|
||||
"description": "Displays a popup message to the currently logged on user",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "VerifyAntivirus.ps1",
|
||||
"submittedBy": "https://github.com/beejayzed",
|
||||
"name": "Verify Antivirus Status",
|
||||
"description": "Verify and display status for all installed Antiviruses",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "CreateAllUserLogonScript.ps1",
|
||||
"submittedBy": "https://github.com/nr-plaxon",
|
||||
"name": "Create User Logon Script",
|
||||
"description": "Creates a powershell script that runs at logon of any user on the machine in the security context of the user.",
|
||||
"shell": "powershell"
|
||||
}
|
||||
{
|
||||
"guid": "6820cb5e-5a7f-4d9b-8c22-d54677e3cc04",
|
||||
"filename": "Win_Clear_Firefox_Cache.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Clear Firefox Cache",
|
||||
"description": "This script will clean up Mozilla Firefox for all users.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Browsers"
|
||||
},
|
||||
{
|
||||
"guid": "3ff6a386-11d1-4f9d-8cca-1b0563bb6443",
|
||||
"filename": "Win_Clear_Google_Chrome_Cache.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Clear Google Chrome Cache",
|
||||
"description": "This script will clean up Google Chrome for all users.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Browsers"
|
||||
},
|
||||
{
|
||||
"guid": "be1de837-f677-4ac5-aa0c-37a0fc9991fc",
|
||||
"filename": "Win_Install_Adobe_Reader.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Install Adobe Reader DC",
|
||||
"description": "Installs Adobe Reader DC.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software>Chocolatey"
|
||||
},
|
||||
{
|
||||
"guid": "2ee134d5-76aa-4160-b334-a1efbc62079f",
|
||||
"filename": "Win_Install_Duplicati.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Install Duplicati",
|
||||
"description": "This script installs Duplicati 2.0.5.1 as a service.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"guid": "81cc5bcb-01bf-4b0c-89b9-0ac0f3fe0c04",
|
||||
"filename": "Win_Reset_Windows_Update.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Reset Windows Update",
|
||||
"description": "This script will reset all of the Windows Updates components to DEFAULT SETTINGS.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"guid": "8db87ff0-a9b4-4d9d-bc55-377bbcb85b6d",
|
||||
"filename": "Win_Start_Cleanup.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Cleanup C: drive",
|
||||
"description": "Cleans the C: drive's Window Temperary files, Windows SoftwareDistribution folder, the local users Temperary folder, IIS logs (if applicable) and empties the recycling bin. All deleted files will go into a log transcript in $env:TEMP. By default this script leaves files that are newer than 7 days old however this variable can be edited.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "2f28e8c1-ae0f-4b46-a826-f513974526a3",
|
||||
"filename": "Win_Defender_FullScan_Background.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Windows Defender Full Scan",
|
||||
"description": "Runs a Windows Defender Full background scan.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "adf81ddb-3b77-415c-a89b-2ccc826b5aa7",
|
||||
"filename": "Win_Defender_QuickScan_Background.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Windows Defender Quick Scan",
|
||||
"description": "Runs a Quick Scan using Windows Defender in the Background.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "3c46290b-85db-4cd2-93a2-943c8c93b3b1",
|
||||
"filename": "Speedtest.py",
|
||||
"submittedBy": "https://github.com/wh1te909",
|
||||
"name": "Speed Test",
|
||||
"description": "Runs a Speed Test using Python",
|
||||
"shell": "python",
|
||||
"category": "TRMM (Win):Network"
|
||||
},
|
||||
{
|
||||
"guid": "9d34f482-1f0c-4b2f-b65f-a9cf3c13ef5f",
|
||||
"filename": "Win_Rename_Installed_App.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "Rename Tactical RMM Agent",
|
||||
"description": "Updates the DisplayName registry entry for the Tactical RMM windows agent to your desired name. This script takes 1 required argument: the name you wish to set.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):TacticalRMM Related"
|
||||
},
|
||||
{
|
||||
"guid": "525ae965-1dcf-4c17-92b3-5da3cf6819f5",
|
||||
"filename": "Win_Bitlocker_Encrypted_Drive_c.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Check C Drive for Bitlocker Status",
|
||||
"description": "Runs a check on drive C for Bitlocker status.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "2ea35fa2-c227-4d17-a40e-4d39f252e27a",
|
||||
"filename": "Win_Bitlocker_Create_Status_Report.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Create Bitlocker Status Report",
|
||||
"description": "Creates a Bitlocker status report.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "9e5769c1-3873-4941-bf70-e851e0afbd6d",
|
||||
"filename": "Win_Bitlocker_Retrieve_Status_Report.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Retreive Bitlocker Status Report",
|
||||
"description": "Retreives a Bitlocker status report.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "cfa14c28-4dfc-4d4e-95ee-a380652e058d",
|
||||
"filename": "Win_Bios_Check.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Check BIOS Information",
|
||||
"description": "Retreives and reports on BIOS make, version, and date.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Hardware"
|
||||
},
|
||||
{
|
||||
"guid": "95a2ee6f-b89b-4551-856e-3081b041caa7",
|
||||
"filename": "Win_Reset_High_Performance_Power_Profile_to_Defaults.ps1",
|
||||
"submittedBy": "https://github.com/azulskyknight",
|
||||
"name": "Reset High Perf Power Profile",
|
||||
"description": "Resets monitor, disk, standby, and hibernate timers in the default High Performance power profile to their default values. It also re-indexes the AC and DC power profiles into their default order.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Power"
|
||||
},
|
||||
{
|
||||
"guid": "2cbd30b0-84dd-4388-a36d-2e2e980f1a3e",
|
||||
"filename": "Win_Set_High_Performance_Power_Profile.ps1",
|
||||
"submittedBy": "https://github.com/azulskyknight",
|
||||
"name": "Set High Perf Power Profile",
|
||||
"description": "Sets the High Performance Power profile to the active power profile. Use this to keep machines from falling asleep.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Power"
|
||||
},
|
||||
{
|
||||
"guid": "553236d3-81bc-49f4-af8a-0cff925a7f6d",
|
||||
"filename": "Win_10_Upgrade.ps1",
|
||||
"submittedBy": "https://github.com/RVL-Solutions and https://github.com/darimm",
|
||||
"name": "Windows 10 Upgrade",
|
||||
"description": "Forces an upgrade to the latest release of Windows 10.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"guid": "375323e5-cac6-4f35-a304-bb7cef35902d",
|
||||
"filename": "Win_Disk_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Check Disk Hardware Health (using Event Viewer errors)",
|
||||
"description": "Checks local disks for errors reported in event viewer within the last 24 hours",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Hardware"
|
||||
},
|
||||
{
|
||||
"guid": "7c14beb4-d1c3-41aa-8e70-92a267d6e080",
|
||||
"filename": "Win_Duplicati_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Check Duplicati",
|
||||
"description": "Checks Duplicati Backup is running properly over the last 24 hours",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"guid": "da51111c-aff6-4d87-9d76-0608e1f67fe5",
|
||||
"filename": "Win_Defender_Enable.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Enable Windows Defender",
|
||||
"description": "Enables Windows Defender and sets preferences",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "a223d03a-e22e-40e0-94f2-92dd8c481d14",
|
||||
"filename": "Win_Open_SSH_Server_Install.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Install SSH",
|
||||
"description": "Installs and enabled OpenSSH Server",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Windows Features"
|
||||
},
|
||||
{
|
||||
"guid": "2435297a-6263-4e90-8688-1847400d0e22",
|
||||
"filename": "Win_RDP_enable.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Enable RDP",
|
||||
"description": "Enables RDP",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Windows Features"
|
||||
},
|
||||
{
|
||||
"guid": "24f19ead-fdfe-46b4-9dcb-4cd0e12a3940",
|
||||
"filename": "Win_Speedtest.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Speed Test Powershell",
|
||||
"description": "Speed Test with Powershell(win 10 or server2016+)",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network"
|
||||
},
|
||||
{
|
||||
"guid": "a821975c-60df-4d58-8990-6cf8a55b4ee0",
|
||||
"filename": "Win_Sync_Time.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Sync DC Time",
|
||||
"description": "Syncs time with domain controller",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Active Directory"
|
||||
},
|
||||
{
|
||||
"guid": "b720e320-7755-4c89-9992-e1a6c43699ed",
|
||||
"filename": "Win_Defender_Clear_Logs.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Clear Defender Logs",
|
||||
"description": "Clears Windows Defender Logs",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "d980fda3-a068-47eb-8495-1aab07a24e64",
|
||||
"filename": "Win_Defender_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Defender Status",
|
||||
"description": "This will check for Malware, Antispyware, that Windows Defender is Healthy, last scan etc within the last 24 hours",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "9956e936-6fdb-4488-a9d8-8b274658037f",
|
||||
"filename": "Win_Disable_Fast_Startup.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Disable Fast Startup",
|
||||
"description": "Disables Faststartup on Windows 10",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Power"
|
||||
},
|
||||
{
|
||||
"guid": "2472bbaf-1941-4722-8a58-d1dd0f528801",
|
||||
"filename": "Win_Update_Tactical_Exclusion.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "TRMM Defender Exclusions",
|
||||
"description": "Windows Defender Exclusions for Tactical RMM",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "b253dc76-41a0-48ca-9cea-bee4277402c4",
|
||||
"filename": "Win_Display_Message_To_User.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "Display Message To User",
|
||||
"description": "Displays a popup message to the currently logged on user",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "19224d21-bd39-44bc-b9cf-8f1ba3ca9c11",
|
||||
"filename": "Win_Antivirus_Verify.ps1",
|
||||
"submittedBy": "https://github.com/beejayzed",
|
||||
"name": "Verify Antivirus Status",
|
||||
"description": "Verify and display status for all installed Antiviruses",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "f88c5c52-c6fe-44db-b727-b7912a4279ed",
|
||||
"filename": "Win_Create_All_User_Logon_Script.ps1",
|
||||
"submittedBy": "https://github.com/nr-plaxon",
|
||||
"name": "Create User Logon Script",
|
||||
"description": "Creates a powershell script that runs at logon of any user on the machine in the security context of the user.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "5615aa90-0272-427b-8acf-0ca019612501",
|
||||
"filename": "Win_Chocolatey_Update_Installed.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Chocolatey Update Installed Apps",
|
||||
"description": "Update all apps that were installed using Chocolatey.",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):3rd Party Software>Chocolatey"
|
||||
},
|
||||
{
|
||||
"guid": "fff8024d-d72e-4457-84fa-6c780f69a16f",
|
||||
"filename": "Win_AD_Check_And_Enable_AD_Recycle_Bin.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "AD - Check and Enable AD Recycle Bin",
|
||||
"description": "Only run on Domain Controllers, checks for Active Directory Recycle Bin and enables if not already enabled",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Active Directory"
|
||||
},
|
||||
{
|
||||
"guid": "71090fc4-faa6-460b-adb0-95d7863544e1",
|
||||
"filename": "Win_Check_Events_for_Bluescreens.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Event Viewer - Check for Bluescreens",
|
||||
"description": "This will check for Bluescreen events on your system",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Monitoring"
|
||||
},
|
||||
{
|
||||
"guid": "5d905886-9eb1-4129-8b81-a013f842eb24",
|
||||
"filename": "Win_Rename_Computer.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Rename Computer",
|
||||
"description": "Rename computer. First parameter will be new PC name. 2nd parameter if yes will auto-reboot machine",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": 30
|
||||
},
|
||||
{
|
||||
"guid": "f396dae2-c768-45c5-bd6c-176e56ed3614",
|
||||
"filename": "Win_Finish_updates_and_restart.ps1",
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "Finish updates and restart",
|
||||
"description": "Finish installing updates and restart PC",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "63f89be0-a9c9-4c61-9b55-bce0b28b90b2",
|
||||
"filename": "Win_Finish_updates_and_shutdown.ps1",
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "Finish updates and shutdown",
|
||||
"description": "Finish installing updates and shutdown PC",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "e09895d5-ca13-44a2-a38c-6e77c740f0e8",
|
||||
"filename": "Win_ScreenConnectAIO.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "ScreenConnect AIO",
|
||||
"description": "Install, Uninstall, Start and Stop ScreenConnect Access Agent",
|
||||
"args": [
|
||||
"-serviceName {{client.ScreenConnectService}}",
|
||||
"-url {{client.ScreenConnectInstaller}}",
|
||||
"-action install"
|
||||
],
|
||||
"default_timeout": "90",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"guid": "3abbb62a-3757-492c-8979-b4fc6174845d",
|
||||
"filename": "Win_Disable_AutoRun.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Disable Autorun",
|
||||
"description": "Disable Autorun System Wide",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "30"
|
||||
},
|
||||
{
|
||||
"guid": "4a11877a-7555-494c-ac74-29d6df3c1989",
|
||||
"filename": "Win_Disable_Cortana.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Disable Cortana",
|
||||
"description": "Disable Cortana System Wide",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "30"
|
||||
},
|
||||
{
|
||||
"guid": "28ef1387-dd4f-4bab-b042-26250914e370",
|
||||
"filename": "Win_WOL_Enable_Status.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "WoL - Enable function",
|
||||
"description": "Wake on Lan enable on Dell, HP, Lenovo",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "685d5432-0b84-46d5-98e8-3ec2054150fe",
|
||||
"filename": "Win_WOL_Test_State.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "WoL - Test State",
|
||||
"description": "Wake on Lan test status",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "6ce5682a-49db-4c0b-9417-609cf905ac43",
|
||||
"filename": "Win_Win10_Change_Key_and_Activate.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Change Win10 Product Key and Activate",
|
||||
"description": "Insert new product key and Activate. Requires 1 parameter the product key you want to use",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "90"
|
||||
}
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-31 01:08
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('scripts', '0005_auto_20201207_1606'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='script',
|
||||
name='default_timeout',
|
||||
field=models.PositiveIntegerField(default=90),
|
||||
),
|
||||
]
|
||||
api/tacticalrmm/scripts/migrations/0007_script_args.py (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-01 14:52
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('scripts', '0006_script_default_timeout'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='script',
|
||||
name='args',
|
||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True, null=True), blank=True, default=list, null=True, size=None),
|
||||
),
|
||||
]
|
||||
@@ -1,5 +1,9 @@
|
||||
import base64
|
||||
|
||||
import re
|
||||
from loguru import logger
|
||||
from typing import Any, List, Union
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
|
||||
from logs.models import BaseAuditModel
|
||||
@@ -15,6 +19,8 @@ SCRIPT_TYPES = [
|
||||
("builtin", "Built In"),
|
||||
]
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class Script(BaseAuditModel):
|
||||
name = models.CharField(max_length=255)
|
||||
@@ -26,9 +32,16 @@ class Script(BaseAuditModel):
|
||||
script_type = models.CharField(
|
||||
max_length=100, choices=SCRIPT_TYPES, default="userdefined"
|
||||
)
|
||||
args = ArrayField(
|
||||
models.TextField(null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
favorite = models.BooleanField(default=False)
|
||||
category = models.CharField(max_length=100, null=True, blank=True)
|
||||
code_base64 = models.TextField(null=True, blank=True)
|
||||
default_timeout = models.PositiveIntegerField(default=90)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
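A small illustration of the new args and default_timeout fields on Script. The script name and argument values are invented, but the {{model.property}} placeholder form is the one resolved by parse_script_args further down, and "{{client.ScreenConnectService}}" appears in community_scripts.json above.

from scripts.models import Script

# hypothetical user-defined script using the new fields
Script.objects.create(
    name="ScreenConnect Install",
    shell="powershell",
    script_type="userdefined",
    args=["-serviceName {{client.ScreenConnectService}}", "-action install"],
    default_timeout=120,
)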
@@ -68,12 +81,27 @@ class Script(BaseAuditModel):
|
||||
s = cls.objects.filter(script_type="builtin").filter(
|
||||
name=script["name"]
|
||||
)
|
||||
|
||||
category = (
|
||||
script["category"] if "category" in script.keys() else "Community"
|
||||
)
|
||||
|
||||
default_timeout = (
|
||||
int(script["default_timeout"])
|
||||
if "default_timeout" in script.keys()
|
||||
else 90
|
||||
)
|
||||
|
||||
args = script["args"] if "args" in script.keys() else []
|
||||
|
||||
if s.exists():
|
||||
i = s.first()
|
||||
i.name = script["name"]
|
||||
i.description = script["description"]
|
||||
i.category = "Community"
|
||||
i.category = category
|
||||
i.shell = script["shell"]
|
||||
i.default_timeout = default_timeout
|
||||
i.args = args
|
||||
|
||||
with open(os.path.join(scripts_dir, script["filename"]), "rb") as f:
|
||||
script_bytes = (
|
||||
@@ -86,8 +114,10 @@ class Script(BaseAuditModel):
|
||||
"name",
|
||||
"description",
|
||||
"category",
|
||||
"default_timeout",
|
||||
"code_base64",
|
||||
"shell",
|
||||
"args",
|
||||
]
|
||||
)
|
||||
else:
|
||||
@@ -106,7 +136,9 @@ class Script(BaseAuditModel):
|
||||
filename=script["filename"],
|
||||
shell=script["shell"],
|
||||
script_type="builtin",
|
||||
category="Community",
|
||||
category=category,
|
||||
default_timeout=default_timeout,
|
||||
args=args,
|
||||
).save()
|
||||
|
||||
@staticmethod
|
||||
@@ -115,3 +147,108 @@ class Script(BaseAuditModel):
|
||||
from .serializers import ScriptSerializer
|
||||
|
||||
return ScriptSerializer(script).data
|
||||
|
||||
@classmethod
|
||||
def parse_script_args(
|
||||
cls, agent, shell: str, args: List[str] = list()
|
||||
) -> Union[List[str], None]:
|
||||
from core.models import CustomField
|
||||
|
||||
if not args:
|
||||
return []
|
||||
|
||||
temp_args = list()
|
||||
|
||||
# pattern to match for injection
|
||||
pattern = re.compile(".*\\{\\{(.*)\\}\\}.*")
|
||||
|
||||
for arg in args:
|
||||
match = pattern.match(arg)
|
||||
if match:
|
||||
# only get the match between the () in regex
|
||||
string = match.group(1)
|
||||
|
||||
# split by period if exists. First should be model and second should be property
|
||||
temp = string.split(".")
|
||||
|
||||
# check for model and property
|
||||
if len(temp) != 2:
|
||||
# ignore arg since it is invalid
|
||||
continue
|
||||
|
||||
if temp[0] == "client":
|
||||
model = "client"
|
||||
obj = agent.client
|
||||
elif temp[0] == "site":
|
||||
model = "site"
|
||||
obj = agent.site
|
||||
elif temp[0] == "agent":
|
||||
model = "agent"
|
||||
obj = agent
|
||||
else:
|
||||
# ignore arg since it is invalid
|
||||
continue
|
||||
|
||||
if hasattr(obj, temp[1]):
|
||||
value = getattr(obj, temp[1])
|
||||
|
||||
elif CustomField.objects.filter(model=model, name=temp[1]).exists():
|
||||
|
||||
field = CustomField.objects.get(model=model, name=temp[1])
|
||||
model_fields = getattr(field, f"{model}_fields")
|
||||
value = None
|
||||
if model_fields.filter(**{model: obj}).exists():
|
||||
value = model_fields.get(**{model: obj}).value
|
||||
|
||||
if not value and field.default_value:
|
||||
value = field.default_value
|
||||
|
||||
# check if value exists and if not use default
|
||||
if value and field.type == "multiple":
|
||||
value = format_shell_array(shell, value)
|
||||
elif value and field.type == "checkbox":
|
||||
value = format_shell_bool(shell, value)
|
||||
|
||||
if not value:
|
||||
continue
|
||||
|
||||
else:
|
||||
# ignore arg since property is invalid
|
||||
continue
|
||||
|
||||
# replace the value in the arg and push to array
|
||||
# log any unhashable type errors
|
||||
try:
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg)) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
continue
|
||||
|
||||
else:
|
||||
temp_args.append(arg)
|
||||
|
||||
return temp_args
|
||||
|
||||
|
||||
def format_shell_array(shell: str, value: Any) -> str:
|
||||
if shell == "cmd":
|
||||
return "array args are not supported with batch"
|
||||
elif shell == "powershell":
|
||||
temp_string = ""
|
||||
for item in value:
|
||||
temp_string += item + ","
|
||||
return temp_string.strip(",")
|
||||
else: # python
|
||||
temp_string = ""
|
||||
for item in value:
|
||||
temp_string += item + ","
|
||||
return temp_string.strip(",")
|
||||
|
||||
|
||||
def format_shell_bool(shell: str, value: Any) -> str:
|
||||
if shell == "cmd":
|
||||
return "1" if value else "0"
|
||||
elif shell == "powershell":
|
||||
return "$True" if value else "$False"
|
||||
else: # python
|
||||
return "True" if value else "False"
|
||||
|
||||
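To make the substitution behaviour concrete, a short sketch of what the helpers above return for each shell; the input values are invented.

# format_shell_array joins a "multiple" custom field into a comma-separated string
format_shell_array("powershell", ["alpha", "beta"])  # -> "alpha,beta"
format_shell_array("cmd", ["alpha", "beta"])         # -> "array args are not supported with batch"

# format_shell_bool renders a "checkbox" custom field per shell
format_shell_bool("powershell", True)  # -> "$True"
format_shell_bool("cmd", False)        # -> "0"
format_shell_bool("python", True)      # -> "True"

# parse_script_args then substitutes e.g. "{{agent.hostname}}" with getattr(agent, "hostname"),
# falling back to a CustomField of the same name when the attribute does not exist.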
@@ -12,8 +12,10 @@ class ScriptTableSerializer(ModelSerializer):
|
||||
"description",
|
||||
"script_type",
|
||||
"shell",
|
||||
"args",
|
||||
"category",
|
||||
"favorite",
|
||||
"default_timeout",
|
||||
]
|
||||
|
||||
|
||||
@@ -25,9 +27,11 @@ class ScriptSerializer(ModelSerializer):
|
||||
"name",
|
||||
"description",
|
||||
"shell",
|
||||
"args",
|
||||
"category",
|
||||
"favorite",
|
||||
"code_base64",
|
||||
"default_timeout",
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -7,8 +7,6 @@ from tacticalrmm.celery import app
|
||||
|
||||
@app.task
|
||||
def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None:
|
||||
agents = Agent.objects.filter(pk__in=agentpks)
|
||||
agents_nats = [agent for agent in agents if agent.has_nats]
|
||||
nats_data = {
|
||||
"func": "rawcmd",
|
||||
"timeout": timeout,
|
||||
@@ -17,15 +15,13 @@ def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None:
|
||||
"shell": shell,
|
||||
},
|
||||
}
|
||||
for agent in agents_nats:
|
||||
for agent in Agent.objects.filter(pk__in=agentpks):
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
|
||||
|
||||
@app.task
|
||||
def handle_bulk_script_task(scriptpk, agentpks, args, timeout) -> None:
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
agents = Agent.objects.filter(pk__in=agentpks)
|
||||
agents_nats = [agent for agent in agents if agent.has_nats]
|
||||
nats_data = {
|
||||
"func": "runscript",
|
||||
"timeout": timeout,
|
||||
@@ -35,5 +31,5 @@ def handle_bulk_script_task(scriptpk, agentpks, args, timeout) -> None:
|
||||
"shell": script.shell,
|
||||
},
|
||||
}
|
||||
for agent in agents_nats:
|
||||
for agent in Agent.objects.filter(pk__in=agentpks):
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
|
||||
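For context, a rough sketch of how these bulk tasks are typically queued; the module path and the command are assumptions, and after this change every selected agent is targeted rather than only those with has_nats.

from agents.models import Agent
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task  # module path assumed

pks = list(Agent.objects.values_list("pk", flat=True))
handle_bulk_command_task.delay(pks, "ipconfig /flushdns", "cmd", 30)
handle_bulk_script_task.delay(scriptpk=1, agentpks=pks, args=[], timeout=90)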
@@ -1,3 +1,4 @@
|
||||
from email.policy import default
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
@@ -23,7 +24,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
serializer = ScriptTableSerializer(scripts, many=True)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(serializer.data, resp.data)
|
||||
self.assertEqual(serializer.data, resp.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -36,10 +37,13 @@ class TestScriptViews(TacticalTestCase):
|
||||
"shell": "powershell",
|
||||
"category": "New",
|
||||
"code": "Some Test Code\nnew Line",
|
||||
"default_timeout": 99,
|
||||
"args": ["hello", "world", r"{{agent.public_ip}}"],
|
||||
"favorite": False,
|
||||
}
|
||||
|
||||
# test without file upload
|
||||
resp = self.client.post(url, data)
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(Script.objects.filter(name="Name").exists())
|
||||
self.assertEqual(Script.objects.get(name="Name").code, data["code"])
|
||||
@@ -55,6 +59,10 @@ class TestScriptViews(TacticalTestCase):
|
||||
"shell": "cmd",
|
||||
"category": "New",
|
||||
"filename": file,
|
||||
"default_timeout": 4455,
|
||||
"args": json.dumps(
|
||||
["hello", "world", r"{{agent.public_ip}}"]
|
||||
), # simulate javascript's JSON.stringify() for formData
|
||||
}
|
||||
|
||||
# test with file upload
|
||||
@@ -79,6 +87,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
"description": "Description Change",
|
||||
"shell": script.shell,
|
||||
"code": "Test Code\nAnother Line",
|
||||
"default_timeout": 13344556,
|
||||
}
|
||||
|
||||
# test edit a userdefined script
|
||||
@@ -104,6 +113,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
"shell": script.shell,
|
||||
"favorite": True,
|
||||
"code": "Test Code\nAnother Line",
|
||||
"default_timeout": 54345,
|
||||
}
|
||||
# test marking a builtin script as favorite
|
||||
resp = self.client.put(
|
||||
@@ -120,11 +130,11 @@ class TestScriptViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
script = baker.make("scripts.Script")
|
||||
url = f"/scripts/{script.pk}/script/"
|
||||
url = f"/scripts/{script.pk}/script/" # type: ignore
|
||||
serializer = ScriptSerializer(script)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(serializer.data, resp.data)
|
||||
self.assertEqual(serializer.data, resp.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -160,27 +170,27 @@ class TestScriptViews(TacticalTestCase):
|
||||
script = baker.make(
|
||||
"scripts.Script", code_base64="VGVzdA==", shell="powershell"
|
||||
)
|
||||
url = f"/scripts/{script.pk}/download/"
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.ps1", "code": "Test"})
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.ps1", "code": "Test"}) # type: ignore
|
||||
|
||||
# test batch file
|
||||
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="cmd")
|
||||
url = f"/scripts/{script.pk}/download/"
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.bat", "code": "Test"})
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.bat", "code": "Test"}) # type: ignore
|
||||
|
||||
# test python file
|
||||
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="python")
|
||||
url = f"/scripts/{script.pk}/download/"
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.py", "code": "Test"})
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.py", "code": "Test"}) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -197,6 +207,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
) as f:
|
||||
info = json.load(f)
|
||||
|
||||
guids = []
|
||||
for script in info:
|
||||
fn: str = script["filename"]
|
||||
self.assertTrue(os.path.exists(os.path.join(scripts_dir, fn)))
|
||||
@@ -213,6 +224,19 @@ class TestScriptViews(TacticalTestCase):
|
||||
elif fn.endswith(".py"):
|
||||
self.assertEqual(script["shell"], "python")
|
||||
|
||||
if "args" in script.keys():
|
||||
self.assertIsInstance(script["args"], list)
|
||||
|
||||
# allows strings as long as they can be type casted to int
|
||||
if "default_timeout" in script.keys():
|
||||
self.assertIsInstance(int(script["default_timeout"]), int)
|
||||
|
||||
self.assertIn("guid", script.keys())
|
||||
guids.append(script["guid"])
|
||||
|
||||
# check guids are unique
|
||||
self.assertEqual(len(guids), len(set(guids)))
|
||||
|
||||
def test_load_community_scripts(self):
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "scripts/community_scripts.json")
|
||||
@@ -227,3 +251,12 @@ class TestScriptViews(TacticalTestCase):
|
||||
# test updating already added community scripts
|
||||
Script.load_community_scripts()
|
||||
self.assertEqual(len(info), community_scripts)
|
||||
|
||||
def test_script_filenames_do_not_contain_spaces(self):
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "scripts/community_scripts.json")
|
||||
) as f:
|
||||
info = json.load(f)
|
||||
for script in info:
|
||||
fn: str = script["filename"]
|
||||
self.assertTrue(" " not in fn)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import base64
|
||||
import json
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
@@ -24,25 +25,33 @@ class GetAddScripts(APIView):
|
||||
return Response(ScriptTableSerializer(scripts, many=True).data)
|
||||
|
||||
def post(self, request, format=None):
|
||||
|
||||
data = {
|
||||
"name": request.data["name"],
|
||||
"category": request.data["category"],
|
||||
"description": request.data["description"],
|
||||
"shell": request.data["shell"],
|
||||
"default_timeout": request.data["default_timeout"],
|
||||
"script_type": "userdefined", # force all uploads to be userdefined. built in scripts cannot be edited by user
|
||||
}
|
||||
|
||||
if "favorite" in request.data:
|
||||
# code editor upload
|
||||
if "args" in request.data.keys() and isinstance(request.data["args"], list):
|
||||
data["args"] = request.data["args"]
|
||||
|
||||
# file upload, have to json load it cuz it's formData
|
||||
if "args" in request.data.keys() and "file_upload" in request.data.keys():
|
||||
data["args"] = json.loads(request.data["args"])
|
||||
|
||||
if "favorite" in request.data.keys():
|
||||
data["favorite"] = request.data["favorite"]
|
||||
|
||||
if "filename" in request.data:
|
||||
if "filename" in request.data.keys():
|
||||
message_bytes = request.data["filename"].read()
|
||||
data["code_base64"] = base64.b64encode(message_bytes).decode(
|
||||
"ascii", "ignore"
|
||||
)
|
||||
|
||||
elif "code" in request.data:
|
||||
elif "code" in request.data.keys():
|
||||
message_bytes = request.data["code"].encode("ascii", "ignore")
|
||||
data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
|
||||
|
||||
|
||||
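The two branches above correspond to the two ways the frontend submits args. A hedged illustration of both payload shapes follows; the field names ("args", "file_upload", "filename", "code") come from the view, while the endpoint path and values are assumptions.

import json

# code editor upload: args arrives as a real JSON list
client.post("/scripts/", {
    "name": "Ping Test", "category": "Network", "description": "demo",
    "shell": "powershell", "default_timeout": 90, "code": "ping 8.8.8.8",
    "args": ["-count 4", "{{agent.public_ip}}"],
}, format="json")

# file upload: multipart formData, so args is a JSON-encoded string plus a file_upload flag
client.post("/scripts/", {
    "name": "Ping Test 2", "category": "Network", "description": "demo",
    "shell": "cmd", "default_timeout": 90, "filename": open("ping.bat", "rb"),
    "args": json.dumps(["-count 4"]), "file_upload": True,
}, format="multipart")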
@@ -18,8 +18,6 @@ logger.configure(**settings.LOG_CONFIG)
|
||||
@api_view()
|
||||
def get_services(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
r = asyncio.run(agent.nats_cmd(data={"func": "winservices"}, timeout=10))
|
||||
|
||||
if r == "timeout":
|
||||
@@ -38,8 +36,6 @@ def default_services(request):
|
||||
@api_view(["POST"])
|
||||
def service_action(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
action = request.data["sv_action"]
|
||||
data = {
|
||||
"func": "winsvcaction",
|
||||
@@ -80,8 +76,6 @@ def service_action(request):
|
||||
@api_view()
|
||||
def service_detail(request, pk, svcname):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
data = {"func": "winsvcdetail", "payload": {"name": svcname}}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=10))
|
||||
if r == "timeout":
|
||||
@@ -93,8 +87,6 @@ def service_detail(request, pk, svcname):
|
||||
@api_view(["POST"])
|
||||
def edit_service(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
data = {
|
||||
"func": "editwinsvc",
|
||||
"payload": {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import json
|
||||
import os
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.conf import settings
|
||||
from model_bakery import baker
|
||||
@@ -39,7 +40,7 @@ class TestSoftwareViews(TacticalTestCase):
|
||||
# test without agent software
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEquals(resp.data, [])
|
||||
self.assertEquals(resp.data, []) # type: ignore
|
||||
|
||||
# make some software
|
||||
software = baker.make(
|
||||
@@ -51,6 +52,70 @@ class TestSoftwareViews(TacticalTestCase):
|
||||
serializer = InstalledSoftwareSerializer(software)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEquals(resp.data, serializer.data)
|
||||
self.assertEquals(resp.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_install(self, nats_cmd):
|
||||
url = "/software/install/"
|
||||
old_agent = baker.make_recipe("agents.online_agent", version="1.4.7")
|
||||
data = {
|
||||
"pk": old_agent.pk,
|
||||
"name": "duplicati",
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
agent = baker.make_recipe(
|
||||
"agents.online_agent", version=settings.LATEST_AGENT_VER
|
||||
)
|
||||
data = {
|
||||
"pk": agent.pk,
|
||||
"name": "duplicati",
|
||||
}
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
nats_cmd.reset_mock()
|
||||
nats_cmd.return_value = "ok"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_refresh_installed(self, nats_cmd):
|
||||
url = "/software/refresh/4827342/"
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
nats_cmd.return_value = "timeout"
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
baker.make(
|
||||
"software.InstalledSoftware",
|
||||
agent=agent,
|
||||
software={},
|
||||
)
|
||||
url = f"/software/refresh/{agent.pk}/"
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/software1.json")
|
||||
) as f:
|
||||
sw = json.load(f)
|
||||
|
||||
nats_cmd.reset_mock()
|
||||
nats_cmd.return_value = sw
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
s = agent.installedsoftware_set.first()
|
||||
s.delete()
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -63,8 +63,6 @@ def get_installed(request, pk):
|
||||
@api_view()
|
||||
def refresh_installed(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
r: Any = asyncio.run(agent.nats_cmd({"func": "softwarelist"}, timeout=15))
|
||||
if r == "timeout" or r == "natsdown":
|
||||
|
||||
api/tacticalrmm/tacticalrmm/asgi.py (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
import os
|
||||
|
||||
import django
|
||||
|
||||
from channels.routing import ProtocolTypeRouter, URLRouter # isort:skip
|
||||
from django.core.asgi import get_asgi_application # isort:skip
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tacticalrmm.settings")
|
||||
django.setup()
|
||||
|
||||
from tacticalrmm.utils import KnoxAuthMiddlewareStack # isort:skip
|
||||
from .urls import ws_urlpatterns # isort:skip
|
||||
|
||||
|
||||
application = ProtocolTypeRouter(
|
||||
{
|
||||
"http": get_asgi_application(),
|
||||
"websocket": KnoxAuthMiddlewareStack(URLRouter(ws_urlpatterns)),
|
||||
}
|
||||
)
|
||||
@@ -35,6 +35,14 @@ app.conf.beat_schedule = {
|
||||
"task": "agents.tasks.auto_self_agent_update_task",
|
||||
"schedule": crontab(minute=35, hour="*"),
|
||||
},
|
||||
"monitor-agents": {
|
||||
"task": "agents.tasks.monitor_agents_task",
|
||||
"schedule": crontab(minute="*/7"),
|
||||
},
|
||||
"get-wmi": {
|
||||
"task": "agents.tasks.get_wmi_task",
|
||||
"schedule": crontab(minute="*/18"),
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
|
||||
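The crontab(minute="*/7") and crontab(minute="*/18") schedules above fire every 7 and 18 minutes respectively. Adding another periodic job follows the same shape; the task path below is hypothetical.

# crontab is already imported in this module via celery.schedules
app.conf.beat_schedule["prune-old-audit-logs"] = {
    "task": "logs.tasks.prune_audit_logs_task",  # hypothetical task
    "schedule": crontab(minute=0, hour=4),       # daily at 04:00
}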
@@ -15,7 +15,6 @@ def get_debug_info():
|
||||
|
||||
|
||||
EXCLUDE_PATHS = (
|
||||
"/natsapi",
|
||||
"/api/v3",
|
||||
"/logs/auditlogs",
|
||||
f"/{settings.ADMIN_URL}",
|
||||
|
||||
@@ -15,24 +15,33 @@ EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
|
||||
AUTH_USER_MODEL = "accounts.User"
|
||||
|
||||
# latest release
|
||||
TRMM_VERSION = "0.4.23"
|
||||
TRMM_VERSION = "0.5.2"
|
||||
|
||||
# bump this version everytime vue code is changed
|
||||
# to alert user they need to manually refresh their browser
|
||||
APP_VER = "0.0.121"
|
||||
APP_VER = "0.0.127"
|
||||
|
||||
# https://github.com/wh1te909/rmmagent
|
||||
LATEST_AGENT_VER = "1.4.10"
|
||||
LATEST_AGENT_VER = "1.4.13"
|
||||
|
||||
MESH_VER = "0.7.84"
|
||||
MESH_VER = "0.7.93"
|
||||
|
||||
# for the update script, bump when need to recreate venv or npm install
|
||||
PIP_VER = "11"
|
||||
NPM_VER = "10"
|
||||
PIP_VER = "15"
|
||||
NPM_VER = "14"
|
||||
|
||||
DL_64 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}.exe"
|
||||
DL_32 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}-x86.exe"
|
||||
|
||||
EXE_GEN_URLS = [
|
||||
"https://exe2.tacticalrmm.io/api/v1/exe",
|
||||
"https://exe.tacticalrmm.io/api/v1/exe",
|
||||
]
|
||||
|
||||
DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
|
||||
|
||||
ASGI_APPLICATION = "tacticalrmm.asgi.application"
|
||||
|
||||
try:
|
||||
from .local_settings import *
|
||||
except ImportError:
|
||||
@@ -43,6 +52,7 @@ INSTALLED_APPS = [
|
||||
"django.contrib.contenttypes",
|
||||
"django.contrib.sessions",
|
||||
"django.contrib.staticfiles",
|
||||
"channels",
|
||||
"rest_framework",
|
||||
"rest_framework.authtoken",
|
||||
"knox",
|
||||
@@ -61,7 +71,6 @@ INSTALLED_APPS = [
|
||||
"logs",
|
||||
"scripts",
|
||||
"alerts",
|
||||
"natsapi",
|
||||
]
|
||||
|
||||
if not "AZPIPELINE" in os.environ:
|
||||
|
||||
api/tacticalrmm/tacticalrmm/test_data/software1.json (new file, 2012 lines; diff suppressed because it is too large)
api/tacticalrmm/tacticalrmm/tests.py (new file, 104 lines)
@@ -0,0 +1,104 @@
import json
import os
from unittest.mock import mock_open, patch

import requests
from django.conf import settings
from django.test import TestCase, override_settings

from .utils import (
    bitdays_to_string,
    filter_software,
    generate_winagent_exe,
    get_bit_days,
    reload_nats,
)


class TestUtils(TestCase):
    @patch("requests.post")
    @patch("__main__.__builtins__.open", new_callable=mock_open)
    def test_generate_winagent_exe_success(self, m_open, mock_post):
        r = generate_winagent_exe(
            client=1,
            site=1,
            agent_type="server",
            rdp=1,
            ping=0,
            power=0,
            arch="64",
            token="abc123",
            api="https://api.example.com",
            file_name="rmm-client-site-server.exe",
        )
        self.assertEqual(r.status_code, 200)

    @patch("requests.post")
    def test_generate_winagent_exe_timeout(self, mock_post):
        mock_post.side_effect = requests.exceptions.ConnectionError()

        r = generate_winagent_exe(
            client=1,
            site=1,
            agent_type="server",
            rdp=1,
            ping=0,
            power=0,
            arch="64",
            token="abc123",
            api="https://api.example.com",
            file_name="rmm-client-site-server.exe",
        )
        self.assertEqual(r.status_code, 400)

    @override_settings(
        CERT_FILE="/tmp/asdasd.pem",
        KEY_FILE="/tmp/asds55r.pem",
        ALLOWED_HOSTS=["api.example.com"],
        SECRET_KEY="sekret",
        DOCKER_BUILD=True,
    )
    @patch("subprocess.run")
    def test_reload_nats_docker(self, mock_subprocess):
        _ = reload_nats()

        mock_subprocess.assert_not_called()

    @override_settings(
        ALLOWED_HOSTS=["api.example.com"],
        SECRET_KEY="sekret",
    )
    @patch("subprocess.run")
    def test_reload_nats(self, mock_subprocess):
        _ = reload_nats()

        mock_subprocess.assert_called_once()

    def test_bitdays_to_string(self):
        a = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"]
        all_days = [
            "Monday",
            "Tuesday",
            "Wednesday",
            "Thursday",
            "Friday",
            "Saturday",
            "Sunday",
        ]

        bit_weekdays = get_bit_days(a)
        r = bitdays_to_string(bit_weekdays)
        self.assertEqual(r, ", ".join(a))

        bit_weekdays = get_bit_days(all_days)
        r = bitdays_to_string(bit_weekdays)
        self.assertEqual(r, "Every day")

    def test_filter_software(self):
        with open(
            os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/software1.json")
        ) as f:
            sw = json.load(f)

        r = filter_software(sw)
        self.assertIsInstance(r, list)
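To run just this new module, Django's test runner can be targeted at it directly. A small sketch (assuming it runs inside the project with the repo's virtualenv active and `DJANGO_SETTINGS_MODULE` pointing at `tacticalrmm.settings`):

```python
# Convenience snippet, not part of the diff. Assumes DJANGO_SETTINGS_MODULE
# is set to tacticalrmm.settings before this runs.
import django
from django.core.management import call_command

django.setup()
call_command("test", "tacticalrmm.tests", verbosity=2)  # run only this module
```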
@@ -3,6 +3,7 @@ from django.urls import include, path
from knox import views as knox_views

from accounts.views import CheckCreds, LoginView
from core import consumers

urlpatterns = [
    path("checkcreds/", CheckCreds.as_view()),
@@ -23,10 +24,13 @@ urlpatterns = [
    path("scripts/", include("scripts.urls")),
    path("alerts/", include("alerts.urls")),
    path("accounts/", include("accounts.urls")),
    path("natsapi/", include("natsapi.urls")),
]

if hasattr(settings, "ADMIN_ENABLED") and settings.ADMIN_ENABLED:
    from django.contrib import admin

    urlpatterns += (path(settings.ADMIN_URL, admin.site.urls),)

ws_urlpatterns = [
    path("ws/dashinfo/", consumers.DashInfo.as_asgi()),  # type: ignore
]
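The new `ws_urlpatterns` entry exposes the dashboard websocket at `/ws/dashinfo/`, and the utils change further down shows the Knox token being read from an `access_token` query parameter. Purely as an illustration, assuming the third-party `websockets` package and placeholder host/token values, a client connection could look like this:

```python
# Illustrative only: assumes `pip install websockets`; HOST and TOKEN are
# placeholders for a real API host and Knox token.
import asyncio
import websockets

HOST = "api.example.com"   # placeholder
TOKEN = "knox-token-here"  # placeholder

async def main():
    uri = f"wss://{HOST}/ws/dashinfo/?access_token={TOKEN}"
    async with websockets.connect(uri) as ws:
        msg = await ws.recv()  # first payload pushed by the DashInfo consumer
        print(msg)

asyncio.run(main())
```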
@@ -2,12 +2,18 @@ import json
import os
import string
import subprocess
import tempfile
import time
from typing import Union

import pytz
import requests
from channels.auth import AuthMiddlewareStack
from channels.db import database_sync_to_async
from django.conf import settings
from django.http import HttpResponse
from django.contrib.auth.models import AnonymousUser
from django.http import FileResponse
from knox.auth import TokenAuthentication
from loguru import logger
from rest_framework import status
from rest_framework.response import Response
@@ -31,128 +37,69 @@ WEEK_DAYS = {
}

def generate_installer_exe(
    file_name: str,
    goarch: str,
    inno: str,
    api: str,
    client_id: int,
    site_id: int,
    atype: str,
def generate_winagent_exe(
    client: int,
    site: int,
    agent_type: str,
    rdp: int,
    ping: int,
    power: int,
    download_url: str,
    arch: str,
    token: str,
) -> Union[Response, HttpResponse]:
    api: str,
    file_name: str,
) -> Union[Response, FileResponse]:

    go_bin = "/usr/local/rmmgo/go/bin/go"
    if not os.path.exists(go_bin):
        return Response("nogolang", status=status.HTTP_409_CONFLICT)
    inno = (
        f"winagent-v{settings.LATEST_AGENT_VER}.exe"
        if arch == "64"
        else f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe"
    )

    exe = os.path.join(settings.EXE_DIR, file_name)
    if os.path.exists(exe):
        try:
            os.remove(exe)
        except Exception as e:
            logger.error(str(e))
    data = {
        "client": client,
        "site": site,
        "agenttype": agent_type,
        "rdp": str(rdp),
        "ping": str(ping),
        "power": str(power),
        "goarch": "amd64" if arch == "64" else "386",
        "token": token,
        "inno": inno,
        "url": settings.DL_64 if arch == "64" else settings.DL_32,
        "api": api,
    }
    headers = {"Content-type": "application/json"}

    cmd = [
        "env",
        "CGO_ENABLED=0",
        "GOOS=windows",
        f"GOARCH={goarch}",
        go_bin,
        "build",
        f"-ldflags=\"-s -w -X 'main.Inno={inno}'",
        f"-X 'main.Api={api}'",
        f"-X 'main.Client={client_id}'",
        f"-X 'main.Site={site_id}'",
        f"-X 'main.Atype={atype}'",
        f"-X 'main.Rdp={rdp}'",
        f"-X 'main.Ping={ping}'",
        f"-X 'main.Power={power}'",
        f"-X 'main.DownloadUrl={download_url}'",
        f"-X 'main.Token={token}'\"",
        "-o",
        exe,
    ]
    errors = []
    with tempfile.NamedTemporaryFile() as fp:
        for url in settings.EXE_GEN_URLS:
            try:
                r = requests.post(
                    url,
                    json=data,
                    headers=headers,
                    stream=True,
                    timeout=900,
                )
            except Exception as e:
                errors.append(str(e))
            else:
                errors = []
                break

    build_error = False
    gen_error = False

    gen = [
        "env",
        "GOOS=windows",
        "CGO_ENABLED=0",
        f"GOARCH={goarch}",
        go_bin,
        "generate",
    ]

    try:
        r1 = subprocess.run(
            " ".join(gen),
            capture_output=True,
            shell=True,
            cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"),
        )
    except Exception as e:
        gen_error = True
        logger.error(str(e))
        return Response("genfailed", status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE)

    if r1.returncode != 0:
        gen_error = True
        if r1.stdout:
            logger.error(r1.stdout.decode("utf-8", errors="ignore"))

        if r1.stderr:
            logger.error(r1.stderr.decode("utf-8", errors="ignore"))

        logger.error(f"Go build failed with return code {r1.returncode}")

    if gen_error:
        return Response("genfailed", status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE)

    try:
        r = subprocess.run(
            " ".join(cmd),
            capture_output=True,
            shell=True,
            cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"),
        )
    except Exception as e:
        build_error = True
        logger.error(str(e))
        return Response("buildfailed", status=status.HTTP_412_PRECONDITION_FAILED)

    if r.returncode != 0:
        build_error = True
        if r.stdout:
            logger.error(r.stdout.decode("utf-8", errors="ignore"))

        if r.stderr:
            logger.error(r.stderr.decode("utf-8", errors="ignore"))

        logger.error(f"Go build failed with return code {r.returncode}")

    if build_error:
        return Response("buildfailed", status=status.HTTP_412_PRECONDITION_FAILED)

    if settings.DEBUG:
        with open(exe, "rb") as f:
            response = HttpResponse(
                f.read(),
                content_type="application/vnd.microsoft.portable-executable",
        if errors:
            logger.error(errors)
            return notify_error(
                "Something went wrong. Check debug error log for exact error message"
            )
            response["Content-Disposition"] = f"inline; filename={file_name}"
            return response
    else:
        response = HttpResponse()
        response["Content-Disposition"] = f"attachment; filename={file_name}"
        response["X-Accel-Redirect"] = f"/private/exe/{file_name}"
        return response

        with open(fp.name, "wb") as f:
            for chunk in r.iter_content(chunk_size=1024):  # type: ignore
                if chunk:
                    f.write(chunk)
        del r
    return FileResponse(open(fp.name, "rb"), as_attachment=True, filename=file_name)

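The new implementation drops the on-server Go build and instead POSTs the build parameters to the hosted exe generators, trying each URL in `EXE_GEN_URLS` and falling back to the next on failure. A stripped-down sketch of that fallback pattern (placeholder URLs and payload, not the project's exact code) looks like this:

```python
# Minimal sketch of the try-each-endpoint fallback used above.
# URLS and the payload are placeholders; the real values come from
# settings.EXE_GEN_URLS and the agent build parameters.
import requests

URLS = ["https://exe2.example.com/api/v1/exe", "https://exe.example.com/api/v1/exe"]

def post_with_fallback(payload: dict) -> requests.Response:
    errors = []
    for url in URLS:
        try:
            return requests.post(url, json=payload, stream=True, timeout=900)
        except Exception as e:  # collect the error and try the next endpoint
            errors.append(str(e))
    raise RuntimeError(f"all exe generation endpoints failed: {errors}")
```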
def get_default_timezone():
@@ -164,7 +111,7 @@ def get_default_timezone():
def get_bit_days(days: list[str]) -> int:
    bit_days = 0
    for day in days:
        bit_days |= WEEK_DAYS.get(day)
        bit_days |= WEEK_DAYS.get(day)  # type: ignore
    return bit_days

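The `# type: ignore` change only silences the type checker; the underlying idea is a weekday bitmask that `bitdays_to_string` later turns back into text. The values of `WEEK_DAYS` are not shown in this hunk, so the mapping below is an assumed illustration of the technique, not the project's actual constants:

```python
# Assumed bitmask layout for illustration only -- the real WEEK_DAYS dict
# lives in tacticalrmm/utils.py and may use different bit positions.
WEEK_DAYS = {
    "Sunday": 0x1,
    "Monday": 0x2,
    "Tuesday": 0x4,
    "Wednesday": 0x8,
    "Thursday": 0x10,
    "Friday": 0x20,
    "Saturday": 0x40,
}

def get_bit_days(days: list[str]) -> int:
    bit_days = 0
    for day in days:
        bit_days |= WEEK_DAYS[day]  # OR each selected day into the mask
    return bit_days

assert get_bit_days(["Monday", "Wednesday"]) == 0x2 | 0x8
```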
@@ -254,3 +201,30 @@ def reload_nats():
    subprocess.run(
        ["/usr/local/bin/nats-server", "-signal", "reload"], capture_output=True
    )


@database_sync_to_async
def get_user(access_token):
    try:
        auth = TokenAuthentication()
        token = access_token.decode().split("access_token=")[1]
        user = auth.authenticate_credentials(token.encode())
    except Exception:
        return AnonymousUser()
    else:
        return user[0]


class KnoxAuthMiddlewareInstance:
    def __init__(self, app):
        self.app = app

    async def __call__(self, scope, receive, send):
        scope["user"] = await get_user(scope["query_string"])

        return await self.app(scope, receive, send)


KnoxAuthMiddlewareStack = lambda inner: KnoxAuthMiddlewareInstance(
    AuthMiddlewareStack(inner)
)

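`ASGI_APPLICATION = "tacticalrmm.asgi.application"` in settings and the `ws_urlpatterns` above imply an `asgi.py` that routes websocket traffic through this Knox middleware. That file is not part of this excerpt, so the following is only a plausible sketch of how the pieces could be wired together with standard Channels routing:

```python
# tacticalrmm/asgi.py -- hypothetical sketch, not the file from this diff.
# Assumes django and channels are installed.
import os

from channels.routing import ProtocolTypeRouter, URLRouter
from django.core.asgi import get_asgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tacticalrmm.settings")
django_asgi_app = get_asgi_application()

from tacticalrmm.urls import ws_urlpatterns            # noqa: E402  (assumed import path)
from tacticalrmm.utils import KnoxAuthMiddlewareStack  # noqa: E402  (assumed import path)

application = ProtocolTypeRouter(
    {
        "http": django_asgi_app,  # normal Django views
        "websocket": KnoxAuthMiddlewareStack(URLRouter(ws_urlpatterns)),
    }
)
```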
@@ -19,7 +19,7 @@ def auto_approve_updates_task():
    # scheduled task that checks and approves updates daily

    agents = Agent.objects.only(
        "pk", "version", "last_seen", "overdue_time", "offline_time"
        "pk", "agent_id", "version", "last_seen", "overdue_time", "offline_time"
    )
    for agent in agents:
        agent.delete_superseded_updates()
@@ -46,7 +46,7 @@ def auto_approve_updates_task():
def check_agent_update_schedule_task():
    # scheduled task that installs updates on agents if enabled
    agents = Agent.objects.only(
        "pk", "version", "last_seen", "overdue_time", "offline_time"
        "pk", "agent_id", "version", "last_seen", "overdue_time", "offline_time"
    )
    online = [
        i

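Both hunks add `agent_id` to the `.only()` field list. With Django's `only()`, any field left out is deferred, and loading a deferred field later costs one extra query per object, so a loop that touches `agent_id` on every agent would otherwise fan out into per-row queries. A hedged sketch of the difference (the model name comes from the hunk; the import path and loop are assumptions for illustration, to be run from the project's Django shell):

```python
# Illustration of deferred-field behaviour with .only(); assumes a Django
# shell inside the project so the Agent model is importable.
from agents.models import Agent  # app path assumed from the project layout

# Without "agent_id" in only(), reading agent.agent_id below triggers
# one extra SELECT per agent to fetch the deferred column.
for agent in Agent.objects.only("pk", "version"):
    _ = agent.agent_id  # deferred -> extra query per object

# With the field included, the loop stays at a single query.
for agent in Agent.objects.only("pk", "agent_id", "version"):
    _ = agent.agent_id  # already loaded
```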
@@ -27,7 +27,7 @@ jobs:
      source env/bin/activate
      cd /myagent/_work/1/s/api/tacticalrmm
      pip install --no-cache-dir --upgrade pip
      pip install --no-cache-dir setuptools==53.0.0 wheel==0.36.2
      pip install --no-cache-dir setuptools==54.2.0 wheel==0.36.2
      pip install --no-cache-dir -r requirements.txt -r requirements-test.txt -r requirements-dev.txt
    displayName: "Install Python Dependencies"

18  backup.sh
@@ -1,6 +1,6 @@
#!/bin/bash

SCRIPT_VERSION="10"
SCRIPT_VERSION="12"
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh'

GREEN='\033[0;32m'
@@ -8,17 +8,20 @@ YELLOW='\033[1;33m'
BLUE='\033[0;34m'
RED='\033[0;31m'
NC='\033[0m'
THIS_SCRIPT=$(readlink -f "$0")

TMP_FILE=$(mktemp -p "" "rmmbackup_XXXXXXXXXX")
curl -s -L "${SCRIPT_URL}" > ${TMP_FILE}
NEW_VER=$(grep "^SCRIPT_VERSION" "$TMP_FILE" | awk -F'[="]' '{print $3}')

if [ "${SCRIPT_VERSION}" -ne "${NEW_VER}" ]; then
    printf >&2 "${YELLOW}A newer version of this backup script is available.${NC}\n"
    printf >&2 "${YELLOW}Please download the latest version from ${GREEN}${SCRIPT_URL}${YELLOW} and re-run.${NC}\n"
    rm -f $TMP_FILE
    exit 1
    printf >&2 "${YELLOW}Old backup script detected, downloading and replacing with the latest version...${NC}\n"
    wget -q "${SCRIPT_URL}" -O backup.sh
    exec ${THIS_SCRIPT}
fi

rm -f $TMP_FILE

if [ $EUID -eq 0 ]; then
    echo -ne "\033[0;31mDo NOT run this script as root. Exiting.\e[0m\n"
    exit 1
@@ -69,7 +72,10 @@ sudo tar -czvf ${tmp_dir}/nginx/etc-nginx.tar.gz -C /etc/nginx .

sudo tar -czvf ${tmp_dir}/confd/etc-confd.tar.gz -C /etc/conf.d .

sudo cp ${sysd}/rmm.service ${sysd}/celery.service ${sysd}/celerybeat.service ${sysd}/meshcentral.service ${sysd}/nats.service ${sysd}/natsapi.service ${tmp_dir}/systemd/
sudo cp ${sysd}/rmm.service ${sysd}/celery.service ${sysd}/celerybeat.service ${sysd}/meshcentral.service ${sysd}/nats.service ${tmp_dir}/systemd/
if [ -f "${sysd}/daphne.service" ]; then
    sudo cp ${sysd}/daphne.service ${tmp_dir}/systemd/
fi

cat /rmm/api/tacticalrmm/tacticalrmm/private/log/debug.log | gzip -9 > ${tmp_dir}/rmm/debug.log.gz
cp /rmm/api/tacticalrmm/tacticalrmm/local_settings.py /rmm/api/tacticalrmm/app.ini ${tmp_dir}/rmm/

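The reworked version check no longer asks the user to re-download by hand; it fetches the latest script, overwrites itself, and re-execs. The same self-update pattern, expressed as a hedged Python sketch (placeholder URL, with `os.execv` standing in for bash's `exec`):

```python
# Generic self-update sketch of the pattern used by backup.sh above.
# SCRIPT_URL is a placeholder; requests is assumed to be installed.
import os
import re
import sys

import requests

SCRIPT_VERSION = 12
SCRIPT_URL = "https://example.com/backup.py"  # placeholder

def self_update() -> None:
    latest = requests.get(SCRIPT_URL, timeout=30).text
    m = re.search(r"^SCRIPT_VERSION\s*=\s*(\d+)", latest, re.M)
    if m and int(m.group(1)) != SCRIPT_VERSION:
        with open(__file__, "w") as f:  # overwrite ourselves with the newer version
            f.write(latest)
        os.execv(sys.executable, [sys.executable, __file__])  # re-exec the new script
```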
@@ -1,8 +1,9 @@
FROM node:12-alpine AS builder
FROM node:14-alpine AS builder

WORKDIR /home/node/app

COPY ./web/package.json .
RUN npm install -g npm@latest
RUN npm install

COPY ./web .

@@ -1,4 +1,4 @@
FROM node:12-alpine
FROM node:14-alpine

WORKDIR /home/node/app

@@ -1,4 +1,4 @@
FROM nats:2.1-alpine
FROM nats:2.2-alpine

ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
@@ -7,9 +7,6 @@ RUN apk add --no-cache inotify-tools supervisor bash

SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]

COPY natsapi/bin/nats-api /usr/local/bin/
RUN chmod +x /usr/local/bin/nats-api

COPY docker/containers/tactical-nats/entrypoint.sh /
RUN chmod +x /entrypoint.sh

@@ -3,14 +3,13 @@
set -e

: "${DEV:=0}"
: "${API_CONTAINER:=tactical-backend}"
: "${API_PORT:=80}"

if [ "${DEV}" = 1 ]; then
    NATS_CONFIG=/workspace/api/tacticalrmm/nats-rmm.conf
else
    NATS_CONFIG="${TACTICAL_DIR}/api/nats-rmm.conf"
fi

sleep 15
until [ -f "${TACTICAL_READY_FILE}" ]; do
    echo "waiting for init container to finish install or update..."
@@ -38,11 +37,6 @@ stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
redirect_stderr=true

[program:nats-api]
command=/bin/bash -c "/usr/local/bin/nats-api -debug -api-host http://${API_CONTAINER}:${API_PORT}/natsapi -nats-host tls://${API_HOST}:4222"
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
redirect_stderr=true
EOF
)"

@@ -63,8 +63,19 @@ server {
        alias ${TACTICAL_DIR}/api/tacticalrmm/private/;
    }

    location ~ ^/(natsapi) {
        deny all;
    location ~ ^/ws/ {
        set \$api http://tactical-websockets:8383;
        proxy_pass \$api;

        proxy_http_version 1.1;
        proxy_set_header Upgrade \$http_upgrade;
        proxy_set_header Connection "upgrade";

        proxy_redirect off;
        proxy_set_header Host \$host;
        proxy_set_header X-Real-IP \$remote_addr;
        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Host \$server_name;
    }

    error_log /var/log/nginx/api-error.log;

@@ -30,33 +30,30 @@ FROM python:3.9.2-slim
ENV VIRTUAL_ENV /opt/venv
ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_TMP_DIR /tmp/tactical
ENV TACTICAL_GO_DIR /usr/local/rmmgo
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
ENV TACTICAL_USER tactical
ENV PATH "${VIRTUAL_ENV}/bin:${TACTICAL_GO_DIR}/go/bin:$PATH"
ENV PATH "${VIRTUAL_ENV}/bin:$PATH"

# copy files from repo
COPY api/tacticalrmm ${TACTICAL_TMP_DIR}/api
COPY scripts ${TACTICAL_TMP_DIR}/scripts

# copy go install from build stage
COPY --from=golang:1.16 /usr/local/go ${TACTICAL_GO_DIR}/go
COPY --from=CREATE_VENV_STAGE ${VIRTUAL_ENV} ${VIRTUAL_ENV}

# install deps
RUN apt-get update && \
    apt-get upgrade -y && \
    apt-get install -y --no-install-recommends git && \
    apt-get install -y --no-install-recommends rsync && \
    rm -rf /var/lib/apt/lists/* && \
    go get github.com/josephspurrier/goversioninfo/cmd/goversioninfo && \
    groupadd -g 1000 "${TACTICAL_USER}" && \
    useradd -M -d "${TACTICAL_DIR}" -s /bin/bash -u 1000 -g 1000 "${TACTICAL_USER}"

SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]

# overwrite goversioninfo file
COPY api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
RUN chmod +x /usr/local/bin/goversioninfo
# copy nats-api file
COPY natsapi/bin/nats-api /usr/local/bin/
RUN chmod +x /usr/local/bin/nats-api

# docker init
COPY docker/containers/tactical/entrypoint.sh /
@@ -65,4 +62,4 @@ ENTRYPOINT ["/entrypoint.sh"]

WORKDIR ${TACTICAL_DIR}/api

EXPOSE 80
EXPOSE 80 443 8383

@@ -29,14 +29,15 @@ function check_tactical_ready {
# tactical-init
if [ "$1" = 'tactical-init' ]; then

    mkdir -p ${TACTICAL_DIR}/tmp
    mkdir -p ${TACTICAL_DIR}/scripts/userdefined

    test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"

    # copy container data to volume
    cp -af ${TACTICAL_TMP_DIR}/. ${TACTICAL_DIR}/
    rsync -a --no-perms --no-owner --delete --exclude "tmp/*" --exclude "certs/*" --exclude="api/tacticalrmm/private/*" "${TACTICAL_TMP_DIR}/" "${TACTICAL_DIR}/"

    mkdir -p ${TACTICAL_DIR}/tmp
    mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/exe
    mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/logs

    until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
        echo "waiting for postgresql container to be ready..."
        sleep 5
@@ -62,6 +63,9 @@ DOCKER_BUILD = True
CERT_FILE = '/opt/tactical/certs/fullchain.pem'
KEY_FILE = '/opt/tactical/certs/privkey.pem'

EXE_DIR = '/opt/tactical/api/tacticalrmm/private/exe'
LOG_DIR = '/opt/tactical/api/tacticalrmm/private/log'

SCRIPTS_DIR = '/opt/tactical/scripts'

ALLOWED_HOSTS = ['${API_HOST}', 'tactical-backend']
@@ -164,3 +168,12 @@ if [ "$1" = 'tactical-celerybeat' ]; then
    test -f "${TACTICAL_DIR}/api/celerybeat.pid" && rm "${TACTICAL_DIR}/api/celerybeat.pid"
    celery -A tacticalrmm beat -l info
fi

# backend container
if [ "$1" = 'tactical-websockets' ]; then
    check_tactical_ready

    export DJANGO_SETTINGS_MODULE=tacticalrmm.settings

    daphne tacticalrmm.asgi:application --port 8383 -b 0.0.0.0
fi
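The init path above waits for PostgreSQL with bash's `/dev/tcp` trick before continuing. The same wait-for-port idea, as a small standalone Python sketch (host and port are placeholders for the postgres container settings):

```python
# Wait until a TCP port accepts connections -- mirrors the /dev/tcp loop above.
# Host/port below are placeholders.
import socket
import time

def wait_for_port(host: str, port: int, delay: float = 5.0) -> None:
    while True:
        try:
            with socket.create_connection((host, port), timeout=3):
                return  # port is open, the service is ready
        except OSError:
            print(f"waiting for {host}:{port} to be ready...")
            time.sleep(delay)

wait_for_port("tactical-postgres", 5432)
```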
@@ -22,6 +22,7 @@ volumes:
services:
  # postgres database for api service
  tactical-postgres:
    container_name: trmm-postgres
    image: postgres:13-alpine
    restart: always
    environment:
@@ -35,6 +36,7 @@ services:

  # redis container for celery tasks
  tactical-redis:
    container_name: trmm-redis
    image: redis:6.0-alpine
    restart: always
    networks:
@@ -42,6 +44,7 @@ services:

  # used to initialize the docker environment
  tactical-init:
    container_name: trmm-init
    image: ${IMAGE_REPO}tactical:${VERSION}
    restart: on-failure
    command: ["tactical-init"]
@@ -65,6 +68,7 @@ services:

  # nats
  tactical-nats:
    container_name: trmm-nats
    image: ${IMAGE_REPO}tactical-nats:${VERSION}
    restart: always
    environment:
@@ -80,6 +84,7 @@ services:

  # meshcentral container
  tactical-meshcentral:
    container_name: trmm-meshcentral
    image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
    restart: always
    environment:
@@ -101,6 +106,7 @@ services:

  # mongodb container for meshcentral
  tactical-mongodb:
    container_name: trmm-mongodb
    image: mongo:4.4
    restart: always
    environment:
@@ -114,6 +120,7 @@ services:

  # container that hosts vue frontend
  tactical-frontend:
    container_name: trmm-frontend
    image: ${IMAGE_REPO}tactical-frontend:${VERSION}
    restart: always
    networks:
@@ -123,6 +130,7 @@ services:

  # container for django backend
  tactical-backend:
    container_name: trmm-backend
    image: ${IMAGE_REPO}tactical:${VERSION}
    command: ["tactical-backend"]
    restart: always
@@ -135,8 +143,25 @@ services:
    depends_on:
      - tactical-postgres

  tactical-nginx:
  # container for django websockets connections
  tactical-websockets:
    container_name: trmm-websockets
    image: ${IMAGE_REPO}tactical:${VERSION}
    command: ["tactical-websockets"]
    restart: always
    networks:
      - proxy
      - api-db
      - redis
    volumes:
      - tactical_data:/opt/tactical
    depends_on:
      - tactical-postgres
      - tactical-backend

  # container for tactical reverse proxy
  tactical-nginx:
    container_name: trmm-nginx
    image: ${IMAGE_REPO}tactical-nginx:${VERSION}
    restart: always
    environment:
@@ -156,6 +181,7 @@ services:

  # container for celery worker service
  tactical-celery:
    container_name: trmm-celery
    image: ${IMAGE_REPO}tactical:${VERSION}
    command: ["tactical-celery"]
    restart: always
@@ -171,6 +197,7 @@ services:

  # container for celery beat service
  tactical-celerybeat:
    container_name: trmm-celerybeat
    image: ${IMAGE_REPO}tactical:${VERSION}
    command: ["tactical-celerybeat"]
    restart: always

104  docs/docs/contributing_using_devbox.md  Normal file
@@ -0,0 +1,104 @@
Hidden docs, needs work.

Notes for a local Hyper-V devbox.

From https://raw.githubusercontent.com/silversword411/tacticalrmm-devdocs

Needs an official install_devbox.sh script.

# Setup local devbox in Hyper-V VM

## Install Ubuntu 20.04 LTS

Don't forget to

```
sudo apt-get update && sudo apt-get upgrade
```

### Optional

Allow all users in the sudo group to run sudo without being prompted for a password every time:

```
sudo visudo
```

Add this:

```
%sudo ALL=(ALL) NOPASSWD: ALL
```

## Download customized install script and tweak

Create a folder to clone into:

```
sudo mkdir /rmm
sudo chown ${USER}:${USER} -R /rmm
cd /rmm
```

Get the dev install script:

```
wget https://raw.githubusercontent.com/silversword411/tacticalrmm-devdocs/blob/main/install_devbox.sh
```

Edit it, search for `REPLACEMEWITHYOURFORKEDREPOURL`, and replace it with your forked repo URL (an example is commented out below it).

## Run it

```
./install_devbox.sh
```

## Watch for

!!!Note Unlike regular installs, don't worry about the QR code

## Celebrate

# Misc commands

### Start mkdocs on dev box

```bash
cd /rmm/api
source env/bin/activate
pip install --upgrade pip
pip install --upgrade setuptools wheel
pip install -r tacticalrmm/requirements-dev.txt
cd /rmm/docs
mkdocs serve
```

### Running tests locally

Prep and update:

```bash
source /rmm/api/env/bin/activate
cd /rmm/api/tacticalrmm
pip install -r requirements.txt
```

Then run the tests:

```
python manage.py test
```
97  docs/docs/contributing_using_vscode.md  Normal file
@@ -0,0 +1,97 @@
### 1. Install vscode

[https://code.visualstudio.com/download](https://code.visualstudio.com/download)

### 2. Fork Project in Github

This makes a duplicate of the code under your GitHub account that you can edit.

[https://github.com/wh1te909/tacticalrmm](https://github.com/wh1te909/tacticalrmm)

### 3. Add your (forked) repo to vscode

Clone repository

Login to your Github

Choose local folder

### 3a. Install extra vscode Extensions

GitLens

Remote - SSH

### 4. Open Terminal

[https://code.visualstudio.com/docs/editor/integrated-terminal](https://code.visualstudio.com/docs/editor/integrated-terminal)

```
Ctrl+`
```

### 5. Configure a remote for your fork (in vscode)

[https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/configuring-a-remote-for-a-fork](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/configuring-a-remote-for-a-fork)

Configure your local fork and tell it where the original repo is, so you can compare and merge updates later when the official repo is updated.

Check repos

```
git remote -v
```

Add upstream repo

```
git remote add upstream https://github.com/wh1te909/tacticalrmm
```

Confirm changes

```
git remote -v
```

### 6. Contribute code

Make changes to something.

`Commit` (update something) and notate what you did.

`Push` (from your local vscode to your GitHub fork).

Open a browser and look at your repo (it should reflect your commit).

#### 6a. Request your changes to be pulled into the primary repo (Pull Request)

In the browser, create a pull request.

### 7. Sync your local fork

[https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/syncing-a-fork](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/syncing-a-fork)

Bring changes from the original repo into your local vscode copy so you stay current with changes made in the original GitHub repo.

```
git pull --rebase upstream develop
```

#### 7a. Push your local updated copy to your Github fork

Then `push` the updated local repo to your online GitHub fork.

### 8. Verify and Repeat

Check your GitHub fork in the browser; it should now be up to date with the original. Repeat steps 6 or 7 as necessary.
74  docs/docs/example_nginx.md  Normal file
@@ -0,0 +1,74 @@
Example of `/etc/nginx/sites-available/rmm.conf`:

**DO NOT COPY/PASTE INTO YOUR SERVER. USE ONLY AS A REFERENCE.**

```
server_tokens off;

upstream tacticalrmm {
    server unix:////rmm/api/tacticalrmm/tacticalrmm.sock;
}

map $http_user_agent $ignore_ua {
    "~python-requests.*" 0;
    "~go-resty.*" 0;
    default 1;
}

server {
    listen 80;
    server_name api.example.com;
    return 301 https://$server_name$request_uri;
}

server {
    listen 443 ssl;
    server_name api.example.com;
    client_max_body_size 300M;
    access_log /rmm/api/tacticalrmm/tacticalrmm/private/log/access.log combined if=$ignore_ua;
    error_log /rmm/api/tacticalrmm/tacticalrmm/private/log/error.log;
    ssl_certificate /etc/letsencrypt/live/example.com/fullchain.pem;
    ssl_certificate_key /etc/letsencrypt/live/example.com/privkey.pem;
    ssl_ciphers 'ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384';

    location /static/ {
        root /rmm/api/tacticalrmm;
    }

    location /private/ {
        internal;
        add_header "Access-Control-Allow-Origin" "https://rmm.example.com";
        alias /rmm/api/tacticalrmm/tacticalrmm/private/;
    }

    location ~ ^/(natsapi) {
        allow 127.0.0.1;
        deny all;
        uwsgi_pass tacticalrmm;
        include /etc/nginx/uwsgi_params;
        uwsgi_read_timeout 500s;
        uwsgi_ignore_client_abort on;
    }

    location ~ ^/ws/ {
        proxy_pass http://unix:/rmm/daphne.sock;

        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";

        proxy_redirect off;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Host $server_name;
    }

    location / {
        uwsgi_pass tacticalrmm;
        include /etc/nginx/uwsgi_params;
        uwsgi_read_timeout 9999s;
        uwsgi_ignore_client_abort on;
    }
}
```
Some files were not shown because too many files have changed in this diff.