Compare commits

...

49 Commits

Author SHA1 Message Date
wh1te909
8dddd2d896 Release 0.8.3 2021-09-06 09:30:51 +00:00
wh1te909
f319c95c2b bump version 2021-09-06 09:10:00 +00:00
wh1te909
8e972b0907 add docs for api keys 2021-09-06 08:50:18 +00:00
sadnub
395e400215 fix docker build script 2021-09-05 23:52:33 -04:00
sadnub
3685e3111f fix docker prod spinup. Move api container to uwsgi 2021-09-05 23:49:10 -04:00
sadnub
7bb1c75dc6 add auditing to objects URLAction, KeyStore, CustomFields and also audit when url actions are run 2021-09-05 12:32:37 -04:00
sadnub
b20834929c formatting 2021-09-05 11:35:15 -04:00
sadnub
181891757e fix tasks with assigned checks being added to automation policy 2021-09-05 11:22:21 -04:00
wh1te909
b16feeae44 fix debug log 2021-09-05 08:45:41 +00:00
wh1te909
684e049f27 typo 2021-09-05 06:07:46 +00:00
wh1te909
8cebd901b2 update reqs 2021-09-05 01:40:25 +00:00
wh1te909
3c96beb8fb fix celery memory leak 2021-09-04 23:40:57 +00:00
Dan
8a46459cf9 Merge pull request #683 from silversword411/develop
wip script additions and docs updates
2021-09-04 15:46:31 -07:00
Dan
be5c3e9daa Merge pull request #673 from juaromu/docs-securing-nginx
Securing NGINX added to docs
2021-09-04 15:45:37 -07:00
wh1te909
e44453877c skip sw errors fixes #682 2021-09-04 22:23:35 +00:00
wh1te909
f772a4ec56 allow users to reset their own password/2fa fixes #686 2021-09-04 22:15:51 +00:00
wh1te909
44182ec683 fix render error if results are null 2021-09-03 06:29:27 +00:00
wh1te909
b9ab13fa53 hide status field under properly implemented 2021-09-03 06:28:27 +00:00
wh1te909
2ad6721c95 fix pipeline 2021-09-03 05:45:31 +00:00
wh1te909
b7d0604e62 first/last name optional 2021-09-03 05:35:54 +00:00
wh1te909
a7518b4b26 black 2021-09-03 05:34:44 +00:00
wh1te909
50613f5d3e add api auth in settings, removed from local_settings 2021-09-03 05:31:44 +00:00
sadnub
f814767703 add tests and some ui fixes 2021-09-02 23:52:26 -04:00
sadnub
4af86d6456 set alert template on new agents 2021-09-02 21:36:35 -04:00
sadnub
f0a4f00c2d fix properties and block user dashboard access if denied 2021-09-02 21:32:18 -04:00
sadnub
4321affddb allow for creating special tokens for api access and bypassing two factor auth 2021-09-02 21:10:23 -04:00
silversword411
926ed55b9b docs update - Authorized users 2021-09-02 11:28:05 -04:00
silversword411
2ebf308565 Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-09-02 10:33:36 -04:00
silversword411
1c5e736dce wip script network scanner 2021-09-02 10:33:25 -04:00
silversword411
b591f9f5b7 MOAR wips 2021-09-02 08:39:03 -04:00
silversword411
9724882578 wip script for print check 2021-09-02 08:23:05 -04:00
silversword411
ddef2df101 Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-09-02 08:11:21 -04:00
silversword411
8af69c4284 adding alternate ssl to unsupported docs 2021-09-02 07:55:33 -04:00
silversword411
6ebe1ab467 adding alternate ssl to unsupported docs 2021-09-02 07:39:44 -04:00
silversword411
24e4d9cf6d docs Making docker howto visible 2021-09-02 05:21:51 -04:00
silversword411
f35fa0aa58 Troubleshooting docs update 2021-09-01 18:50:18 -04:00
wh1te909
4942f262f1 Release 0.8.2 2021-09-01 07:18:21 +00:00
wh1te909
a20b1a973e bump version 2021-09-01 07:18:09 +00:00
wh1te909
eae5e00706 allow filtering by overdue #674 2021-09-01 06:26:55 +00:00
silversword411
403762d862 wip script additions 2021-08-31 22:45:53 -04:00
sadnub
5c92d4b454 fix bug were script args weren't being substituted when testing scripts 2021-08-31 20:33:36 -04:00
wh1te909
38179b9d38 Release 0.8.1 2021-08-31 06:51:20 +00:00
wh1te909
8f510dde5a bump versions 2021-08-31 06:35:29 +00:00
wh1te909
be42d56e37 fix 500 error when trying to test newly added script 2021-08-31 06:16:40 +00:00
Juan J. Romero
6294530fa3 Securing NGINX added to docs 2021-08-31 15:45:47 +10:00
sadnub
c5c8f5fab1 formatting 2021-08-30 22:32:16 -04:00
sadnub
3d41d79078 change directory for nats configuration file for DOCKER. Fix nats-api commands in dev containers 2021-08-30 22:17:21 -04:00
sadnub
3005061a11 formatting 2021-08-30 08:06:15 -04:00
sadnub
65ea46f457 strip whitespace before processing collector output 2021-08-30 07:42:54 -04:00
67 changed files with 2547 additions and 217 deletions

View File

@@ -25,6 +25,7 @@ POSTGRES_PASS=postgrespass
# DEV SETTINGS
APP_PORT=80
API_PORT=80
API_PROTOCOL=https://
HTTP_PROTOCOL=https
DOCKER_NETWORK=172.21.0.0/24
DOCKER_NGINX_IP=172.21.0.20

View File

@@ -13,12 +13,17 @@ EXPOSE 8000 8383 8005
RUN groupadd -g 1000 tactical && \
useradd -u 1000 -g 1000 tactical
# Copy Dev python reqs
COPY ./requirements.txt /
# Copy nats-api file
COPY natsapi/bin/nats-api /usr/local/bin/
RUN chmod +x /usr/local/bin/nats-api
# Copy Docker Entrypoint
COPY ./entrypoint.sh /
# Copy dev python reqs
COPY .devcontainer/requirements.txt /
# Copy docker entrypoint.sh
COPY .devcontainer/entrypoint.sh /
RUN chmod +x /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]
WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm

View File

@@ -6,8 +6,8 @@ services:
image: api-dev
restart: always
build:
context: .
dockerfile: ./api.dockerfile
context: ..
dockerfile: .devcontainer/api.dockerfile
command: ["tactical-api"]
environment:
API_PORT: ${API_PORT}
@@ -127,9 +127,6 @@ services:
init-dev:
container_name: trmm-init-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
restart: on-failure
command: ["tactical-init-dev"]
environment:
@@ -156,9 +153,6 @@ services:
celery-dev:
container_name: trmm-celery-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-celery-dev"]
restart: always
networks:
@@ -174,9 +168,6 @@ services:
celerybeat-dev:
container_name: trmm-celerybeat-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-celerybeat-dev"]
restart: always
networks:
@@ -192,9 +183,6 @@ services:
websockets-dev:
container_name: trmm-websockets-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-websockets-dev"]
restart: always
networks:
@@ -221,6 +209,7 @@ services:
CERT_PRIV_KEY: ${CERT_PRIV_KEY}
APP_PORT: ${APP_PORT}
API_PORT: ${API_PORT}
API_PROTOCOL: ${API_PROTOCOL}
networks:
dev:
ipv4_address: ${DOCKER_NGINX_IP}
@@ -234,9 +223,6 @@ services:
container_name: trmm-mkdocs-dev
image: api-dev
restart: always
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-mkdocs-dev"]
ports:
- "8005:8005"

View File

@@ -78,24 +78,6 @@ DATABASES = {
}
}
REST_FRAMEWORK = {
'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'knox.auth.TokenAuthentication',
),
}
if not DEBUG:
REST_FRAMEWORK.update({
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
})
MESH_USERNAME = '${MESH_USER}'
MESH_SITE = 'https://${MESH_HOST}'
MESH_TOKEN_KEY = '${MESH_TOKEN}'

View File

@@ -0,0 +1,34 @@
# Generated by Django 3.2.6 on 2021-09-01 12:47
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0025_auto_20210721_0424'),
]
operations = [
migrations.CreateModel(
name='APIKey',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_by', models.CharField(blank=True, max_length=100, null=True)),
('created_time', models.DateTimeField(auto_now_add=True, null=True)),
('modified_by', models.CharField(blank=True, max_length=100, null=True)),
('modified_time', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=25, unique=True)),
('key', models.CharField(blank=True, max_length=48, unique=True)),
('expiration', models.DateTimeField(blank=True, default=None, null=True)),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='role',
name='can_manage_api_keys',
field=models.BooleanField(default=False),
),
]

View File

@@ -0,0 +1,25 @@
# Generated by Django 3.2.6 on 2021-09-03 00:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('accounts', '0026_auto_20210901_1247'),
]
operations = [
migrations.AddField(
model_name='apikey',
name='user',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='api_key', to='accounts.user'),
preserve_default=False,
),
migrations.AddField(
model_name='user',
name='block_dashboard_login',
field=models.BooleanField(default=False),
),
]

View File

@@ -1,5 +1,6 @@
from django.contrib.auth.models import AbstractUser
from django.db import models
from django.db.models.fields import CharField, DateTimeField
from logs.models import BaseAuditModel
@@ -24,6 +25,7 @@ CLIENT_TREE_SORT_CHOICES = [
class User(AbstractUser, BaseAuditModel):
is_active = models.BooleanField(default=True)
block_dashboard_login = models.BooleanField(default=False)
totp_key = models.CharField(max_length=50, null=True, blank=True)
dark_mode = models.BooleanField(default=True)
show_community_scripts = models.BooleanField(default=True)
@@ -138,6 +140,9 @@ class Role(BaseAuditModel):
can_manage_accounts = models.BooleanField(default=False)
can_manage_roles = models.BooleanField(default=False)
# authentication
can_manage_api_keys = models.BooleanField(default=False)
def __str__(self):
return self.name
@@ -186,4 +191,22 @@ class Role(BaseAuditModel):
"can_manage_winupdates",
"can_manage_accounts",
"can_manage_roles",
"can_manage_api_keys",
]
class APIKey(BaseAuditModel):
name = CharField(unique=True, max_length=25)
key = CharField(unique=True, blank=True, max_length=48)
expiration = DateTimeField(blank=True, null=True, default=None)
user = models.ForeignKey(
"accounts.User",
related_name="api_key",
on_delete=models.CASCADE,
)
@staticmethod
def serialize(apikey):
from .serializers import APIKeyAuditSerializer
return APIKeyAuditSerializer(apikey).data

View File

@@ -8,6 +8,21 @@ class AccountsPerms(permissions.BasePermission):
if r.method == "GET":
return True
# allow users to reset their own password/2fa see issue #686
base_path = "/accounts/users/"
paths = ["reset/", "reset_totp/"]
if r.path in [base_path + i for i in paths]:
from accounts.models import User
try:
user = User.objects.get(pk=r.data["id"])
except User.DoesNotExist:
pass
else:
if user == r.user:
return True
return _has_perm(r, "can_manage_accounts")
@@ -17,3 +32,9 @@ class RolesPerms(permissions.BasePermission):
return True
return _has_perm(r, "can_manage_roles")
class APIKeyPerms(permissions.BasePermission):
def has_permission(self, r, view):
return _has_perm(r, "can_manage_api_keys")

View File

@@ -1,7 +1,11 @@
import pyotp
from rest_framework.serializers import ModelSerializer, SerializerMethodField
from rest_framework.serializers import (
ModelSerializer,
SerializerMethodField,
ReadOnlyField,
)
from .models import User, Role
from .models import APIKey, User, Role
class UserUISerializer(ModelSerializer):
@@ -17,6 +21,7 @@ class UserUISerializer(ModelSerializer):
"client_tree_splitter",
"loading_bar_color",
"clear_search_when_switching",
"block_dashboard_login",
]
@@ -33,6 +38,7 @@ class UserSerializer(ModelSerializer):
"last_login",
"last_login_ip",
"role",
"block_dashboard_login",
]
@@ -64,3 +70,24 @@ class RoleAuditSerializer(ModelSerializer):
class Meta:
model = Role
fields = "__all__"
class APIKeySerializer(ModelSerializer):
username = ReadOnlyField(source="user.username")
class Meta:
model = APIKey
fields = "__all__"
class APIKeyAuditSerializer(ModelSerializer):
username = ReadOnlyField(source="user.username")
class Meta:
model = APIKey
fields = [
"name",
"username",
"expiration",
]

View File

@@ -1,10 +1,12 @@
from unittest.mock import patch
from django.test import override_settings
from accounts.models import User
from model_bakery import baker, seq
from accounts.models import User, APIKey
from tacticalrmm.test import TacticalTestCase
from accounts.serializers import APIKeySerializer
class TestAccounts(TacticalTestCase):
def setUp(self):
@@ -39,6 +41,12 @@ class TestAccounts(TacticalTestCase):
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, "ok")
# test user set to block dashboard logins
self.bob.block_dashboard_login = True
self.bob.save()
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)
@patch("pyotp.TOTP.verify")
def test_login_view(self, mock_verify):
url = "/login/"
@@ -288,6 +296,68 @@ class TestUserAction(TacticalTestCase):
self.check_not_authenticated("patch", url)
class TestAPIKeyViews(TacticalTestCase):
def setUp(self):
self.setup_coresettings()
self.authenticate()
def test_get_api_keys(self):
url = "/accounts/apikeys/"
apikeys = baker.make("accounts.APIKey", key=seq("APIKEY"), _quantity=3)
serializer = APIKeySerializer(apikeys, many=True)
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(serializer.data, resp.data) # type: ignore
self.check_not_authenticated("get", url)
def test_add_api_keys(self):
url = "/accounts/apikeys/"
user = baker.make("accounts.User")
data = {"name": "Name", "user": user.id, "expiration": None}
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 200)
self.assertTrue(APIKey.objects.filter(name="Name").exists())
self.assertTrue(APIKey.objects.get(name="Name").key)
self.check_not_authenticated("post", url)
def test_modify_api_key(self):
# test a call where api key doesn't exist
resp = self.client.put("/accounts/apikeys/500/", format="json")
self.assertEqual(resp.status_code, 404)
apikey = baker.make("accounts.APIKey", name="Test")
url = f"/accounts/apikeys/{apikey.pk}/" # type: ignore
data = {"name": "New Name"} # type: ignore
resp = self.client.put(url, data, format="json")
self.assertEqual(resp.status_code, 200)
apikey = APIKey.objects.get(pk=apikey.pk) # type: ignore
self.assertEquals(apikey.name, "New Name")
self.check_not_authenticated("put", url)
def test_delete_api_key(self):
# test a call where api key doesn't exist
resp = self.client.delete("/accounts/apikeys/500/", format="json")
self.assertEqual(resp.status_code, 404)
# test delete api key
apikey = baker.make("accounts.APIKey")
url = f"/accounts/apikeys/{apikey.pk}/" # type: ignore
resp = self.client.delete(url, format="json")
self.assertEqual(resp.status_code, 200)
self.assertFalse(APIKey.objects.filter(pk=apikey.pk).exists()) # type: ignore
self.check_not_authenticated("delete", url)
class TestTOTPSetup(TacticalTestCase):
def setUp(self):
self.authenticate()
@@ -313,3 +383,29 @@ class TestTOTPSetup(TacticalTestCase):
r = self.client.post(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, "totp token already set")
class TestAPIAuthentication(TacticalTestCase):
def setUp(self):
# create User and associate to API Key
self.user = User.objects.create(username="api_user", is_superuser=True)
self.api_key = APIKey.objects.create(
name="Test Token", key="123456", user=self.user
)
self.client_setup()
def test_api_auth(self):
url = "/clients/clients/"
# auth should fail if no header set
self.check_not_authenticated("get", url)
# invalid api key in header should return code 400
self.client.credentials(HTTP_X_API_KEY="000000")
r = self.client.get(url, format="json")
self.assertEqual(r.status_code, 401)
# valid api key in header should return code 200
self.client.credentials(HTTP_X_API_KEY="123456")
r = self.client.get(url, format="json")
self.assertEqual(r.status_code, 200)

View File

@@ -12,4 +12,6 @@ urlpatterns = [
path("permslist/", views.PermsList.as_view()),
path("roles/", views.GetAddRoles.as_view()),
path("<int:pk>/role/", views.GetUpdateDeleteRole.as_view()),
path("apikeys/", views.GetAddAPIKeys.as_view()),
path("apikeys/<int:pk>/", views.GetUpdateDeleteAPIKey.as_view()),
]

View File

@@ -13,9 +13,10 @@ from rest_framework.response import Response
from rest_framework.views import APIView
from tacticalrmm.utils import notify_error
from .models import Role, User
from .permissions import AccountsPerms, RolesPerms
from .models import APIKey, Role, User
from .permissions import APIKeyPerms, AccountsPerms, RolesPerms
from .serializers import (
APIKeySerializer,
RoleSerializer,
TOTPSetupSerializer,
UserSerializer,
@@ -47,6 +48,9 @@ class CheckCreds(KnoxLoginView):
user = serializer.validated_data["user"]
if user.block_dashboard_login:
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
# if totp token not set modify response to notify frontend
if not user.totp_key:
login(request, user)
@@ -123,8 +127,10 @@ class GetAddUsers(APIView):
f"ERROR: User {request.data['username']} already exists!"
)
user.first_name = request.data["first_name"]
user.last_name = request.data["last_name"]
if "first_name" in request.data.keys():
user.first_name = request.data["first_name"]
if "last_name" in request.data.keys():
user.last_name = request.data["last_name"]
if "role" in request.data.keys() and isinstance(request.data["role"], int):
role = get_object_or_404(Role, pk=request.data["role"])
user.role = role
@@ -252,3 +258,48 @@ class GetUpdateDeleteRole(APIView):
role = get_object_or_404(Role, pk=pk)
role.delete()
return Response("ok")
class GetAddAPIKeys(APIView):
permission_classes = [IsAuthenticated, APIKeyPerms]
def get(self, request):
apikeys = APIKey.objects.all()
return Response(APIKeySerializer(apikeys, many=True).data)
def post(self, request):
# generate a random API Key
# https://stackoverflow.com/questions/2257441/random-string-generation-with-upper-case-letters-and-digits/23728630#23728630
import random
import string
request.data["key"] = "".join(
random.SystemRandom().choice(string.ascii_uppercase + string.digits)
for _ in range(32)
)
serializer = APIKeySerializer(data=request.data)
serializer.is_valid(raise_exception=True)
obj = serializer.save()
return Response("The API Key was added")
class GetUpdateDeleteAPIKey(APIView):
permission_classes = [IsAuthenticated, APIKeyPerms]
def put(self, request, pk):
apikey = get_object_or_404(APIKey, pk=pk)
# remove API key is present in request data
if "key" in request.data.keys():
request.data.pop("key")
serializer = APIKeySerializer(instance=apikey, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response("The API Key was edited")
def delete(self, request, pk):
apikey = get_object_or_404(APIKey, pk=pk)
apikey.delete()
return Response("The API Key was deleted")

View File

@@ -106,6 +106,10 @@ class Agent(BaseAuditModel):
self.generate_checks_from_policies()
self.generate_tasks_from_policies()
# calculate alert template for new agents
if not old_agent:
self.set_alert_template()
def __str__(self):
return self.hostname

View File

@@ -78,7 +78,7 @@ def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str
@app.task
def force_code_sign(pks: list[int]) -> None:
try:
token = CodeSignToken.objects.first().tokenv # type:ignore
token = CodeSignToken.objects.first().token # type:ignore
except:
return

View File

@@ -644,7 +644,11 @@ def run_script(request):
else:
return notify_error("Custom Field was invalid")
value = r if request.data["save_all_output"] else r.split("\n")[-1].strip()
value = (
r.strip()
if request.data["save_all_output"]
else r.strip().split("\n")[-1].strip()
)
field.save_to_field(value)
return Response(r)

View File

@@ -361,7 +361,6 @@ class TaskRunner(APIView):
def patch(self, request, pk, agentid):
from alerts.models import Alert
from logs.models import AuditLog
agent = get_object_or_404(Agent, agent_id=agentid)
task = get_object_or_404(AutomatedTask, pk=pk)

View File

@@ -197,6 +197,14 @@ class AutomatedTask(BaseAuditModel):
def create_policy_task(self, agent=None, policy=None, assigned_check=None):
# added to allow new policy tasks to be assigned to check only when the agent check exists already
if (
self.assigned_check
and agent
and agent.agentchecks.filter(parent_check=self.assigned_check.id).exists()
):
assigned_check = agent.agentchecks.get(parent_check=self.assigned_check.id)
# if policy is present, then this task is being copied to another policy
# if agent is present, then this task is being created on an agent from a policy
# exit if neither are set or if both are set
@@ -413,9 +421,9 @@ class AutomatedTask(BaseAuditModel):
agent_field = self.custom_field.get_or_create_field_value(self.agent)
value = (
self.stdout
self.stdout.strip()
if self.collector_all_output
else self.stdout.split("\n")[-1].strip()
else self.stdout.strip().split("\n")[-1].strip()
)
agent_field.save_to_field(value)

View File

@@ -0,0 +1,73 @@
# Generated by Django 3.2.6 on 2021-09-05 16:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0026_coresettings_audit_log_prune_days'),
]
operations = [
migrations.AddField(
model_name='customfield',
name='created_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='customfield',
name='created_time',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='customfield',
name='modified_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='customfield',
name='modified_time',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='globalkvstore',
name='created_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='globalkvstore',
name='created_time',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='globalkvstore',
name='modified_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='globalkvstore',
name='modified_time',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='urlaction',
name='created_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='urlaction',
name='created_time',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='urlaction',
name='modified_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='urlaction',
name='modified_time',
field=models.DateTimeField(auto_now=True, null=True),
),
]

View File

@@ -232,7 +232,7 @@ FIELD_TYPE_CHOICES = (
MODEL_CHOICES = (("client", "Client"), ("site", "Site"), ("agent", "Agent"))
class CustomField(models.Model):
class CustomField(BaseAuditModel):
order = models.PositiveIntegerField(default=0)
model = models.CharField(max_length=25, choices=MODEL_CHOICES)
@@ -261,6 +261,12 @@ class CustomField(models.Model):
def __str__(self):
return self.name
@staticmethod
def serialize(field):
from .serializers import CustomFieldSerializer
return CustomFieldSerializer(field).data
@property
def default_value(self):
if self.type == "multiple":
@@ -304,22 +310,34 @@ class CodeSignToken(models.Model):
return "Code signing token"
class GlobalKVStore(models.Model):
class GlobalKVStore(BaseAuditModel):
name = models.CharField(max_length=25)
value = models.TextField()
def __str__(self):
return self.name
@staticmethod
def serialize(store):
from .serializers import KeyStoreSerializer
OPEN_ACTIONS = (("window", "New Window"), ("tab", "New Tab"))
return KeyStoreSerializer(store).data
class URLAction(models.Model):
class URLAction(BaseAuditModel):
name = models.CharField(max_length=25)
desc = models.CharField(max_length=100, null=True, blank=True)
pattern = models.TextField()
def __str__(self):
return self.name
@staticmethod
def serialize(action):
from .serializers import URLActionSerializer
return URLActionSerializer(action).data
RUN_ON_CHOICES = (
("client", "Client"),

View File

@@ -58,7 +58,9 @@ def core_maintenance_tasks():
def cache_db_fields_task():
from agents.models import Agent
for agent in Agent.objects.all():
for agent in Agent.objects.prefetch_related("winupdates", "pendingactions").only(
"pending_actions_count", "has_patches_pending", "pk"
):
agent.pending_actions_count = agent.pendingactions.filter(
status="pending"
).count()

View File

@@ -3,7 +3,9 @@ import pprint
import re
from django.conf import settings
from django.db.models.fields import IPAddressField
from django.shortcuts import get_object_or_404
from logs.models import AuditLog
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes
from rest_framework.exceptions import ParseError
@@ -369,6 +371,13 @@ class RunURLAction(APIView):
url_pattern = re.sub("\\{\\{" + string + "\\}\\}", str(value), url_pattern)
AuditLog.audit_url_action(
username=request.user.username,
urlaction=action,
instance=instance,
debug_info={"ip": request._client_ip},
)
return Response(requote_uri(url_pattern))

View File

@@ -0,0 +1,23 @@
# Generated by Django 3.2.6 on 2021-09-05 16:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logs', '0017_auto_20210731_1707'),
]
operations = [
migrations.AlterField(
model_name='auditlog',
name='action',
field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('agent_install', 'Agent Install'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command'), ('bulk_action', 'Bulk Action'), ('url_action', 'URL Action')], max_length=100),
),
migrations.AlterField(
model_name='auditlog',
name='object_type',
field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk'), ('alerttemplate', 'Alert Template'), ('role', 'Role'), ('urlaction', 'URL Action'), ('keystore', 'Global Key Store'), ('customfield', 'Custom Field')], max_length=100),
),
]

View File

@@ -36,6 +36,7 @@ AUDIT_ACTION_TYPE_CHOICES = [
("execute_script", "Execute Script"),
("execute_command", "Execute Command"),
("bulk_action", "Bulk Action"),
("url_action", "URL Action"),
]
AUDIT_OBJECT_TYPE_CHOICES = [
@@ -52,6 +53,9 @@ AUDIT_OBJECT_TYPE_CHOICES = [
("bulk", "Bulk"),
("alerttemplate", "Alert Template"),
("role", "Role"),
("urlaction", "URL Action"),
("keystore", "Global Key Store"),
("customfield", "Custom Field"),
]
STATUS_CHOICES = [
@@ -190,6 +194,21 @@ class AuditLog(models.Model):
debug_info=debug_info,
)
@staticmethod
def audit_url_action(username, urlaction, instance, debug_info={}):
name = instance.hostname if hasattr(instance, "hostname") else instance.name
classname = type(instance).__name__
AuditLog.objects.create(
username=username,
agent=instance.hostname if classname == "Agent" else None,
agent_id=instance.id if classname == "Agent" else None,
object_type=classname.lower(),
action="url_action",
message=f"{username} ran url action: {urlaction.pattern} on {classname}: {name}",
debug_info=debug_info,
)
@staticmethod
def audit_bulk_action(username, action, affected, debug_info={}):
from agents.models import Agent
@@ -271,22 +290,30 @@ class DebugLog(models.Model):
log_type="system_issues",
):
if get_debug_level() in ["info"]:
cls(log_level="info", agent=agent, log_type=log_type, message=message)
cls.objects.create(
log_level="info", agent=agent, log_type=log_type, message=message
)
@classmethod
def warning(cls, message, agent=None, log_type="system_issues"):
if get_debug_level() in ["info", "warning"]:
cls(log_level="warning", agent=agent, log_type=log_type, message=message)
cls.objects.create(
log_level="warning", agent=agent, log_type=log_type, message=message
)
@classmethod
def error(cls, message, agent=None, log_type="system_issues"):
if get_debug_level() in ["info", "warning", "error"]:
cls(log_level="error", agent=agent, log_type=log_type, message=message)
cls.objects.create(
log_level="error", agent=agent, log_type=log_type, message=message
)
@classmethod
def critical(cls, message, agent=None, log_type="system_issues"):
if get_debug_level() in ["info", "warning", "error", "critical"]:
cls(log_level="critical", agent=agent, log_type=log_type, message=message)
cls.objects.create(
log_level="critical", agent=agent, log_type=log_type, message=message
)
class PendingAction(models.Model):

View File

@@ -8,7 +8,7 @@ channels_redis==3.3.0
chardet==4.0.0
cryptography==3.4.8
daphne==3.0.2
Django==3.2.6
Django==3.2.7
django-cors-headers==3.8.0
django-ipware==3.0.2
django-rest-knox==4.1.0

View File

@@ -128,7 +128,7 @@ class TestScript(APIView):
agent = get_object_or_404(Agent, pk=request.data["agent"])
parsed_args = Script.parse_script_args(
self, request.data["shell"], request.data["args"]
agent, request.data["shell"], request.data["args"]
)
data = {

View File

@@ -14,7 +14,15 @@ class Command(BaseCommand):
agents = Agent.objects.all()
for agent in agents:
sw = agent.installedsoftware_set.first().software
try:
sw = agent.installedsoftware_set.first().software
except:
self.stdout.write(
self.style.ERROR(
f"Agent {agent.hostname} missing software list. Try manually refreshing it from the web UI from the software tab."
)
)
continue
for i in sw:
if search in i["name"].lower():
self.stdout.write(

View File

@@ -0,0 +1,64 @@
from django.utils import timezone as djangotime
from django.utils.translation import ugettext_lazy as _
from rest_framework import exceptions
from rest_framework.authentication import BaseAuthentication, HTTP_HEADER_ENCODING
from accounts.models import APIKey
def get_authorization_header(request):
"""
Return request's 'Authorization:' header, as a bytestring.
Hide some test client ickyness where the header can be unicode.
"""
auth = request.META.get("HTTP_X_API_KEY", b"")
if isinstance(auth, str):
# Work around django test client oddness
auth = auth.encode(HTTP_HEADER_ENCODING)
return auth
class APIAuthentication(BaseAuthentication):
"""
Simple token based authentication for stateless api access.
Clients should authenticate by passing the token key in the "X-API-KEY"
HTTP header. For example:
X-API-KEY: ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789
"""
def get_model(self):
return APIKey
def authenticate(self, request):
auth = get_authorization_header(request)
if not auth:
return None
try:
apikey = auth.decode()
except UnicodeError:
msg = _(
"Invalid token header. Token string should not contain invalid characters."
)
raise exceptions.AuthenticationFailed(msg)
return self.authenticate_credentials(apikey)
def authenticate_credentials(self, key):
try:
apikey = APIKey.objects.select_related("user").get(key=key)
except APIKey.DoesNotExist:
raise exceptions.AuthenticationFailed(_("Invalid token."))
if not apikey.user.is_active:
raise exceptions.AuthenticationFailed(_("User inactive or deleted."))
# check if token is expired
if apikey.expiration and apikey.expiration < djangotime.now():
raise exceptions.AuthenticationFailed(_("The token as expired."))
return (apikey.user, apikey.key)

View File

@@ -1,43 +0,0 @@
SECRET_KEY = 'changeme'
ALLOWED_HOSTS = ['api.example.com']
ADMIN_URL = "somerandomstring/"
CORS_ORIGIN_WHITELIST = ["https://rmm.example.com",]
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'tacticalrmm',
'USER': 'tacticalrmm',
'PASSWORD': 'changeme',
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
REST_FRAMEWORK = {
'DATETIME_FORMAT': "%b-%d-%Y - %H:%M",
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'knox.auth.TokenAuthentication',
),
}
if not DEBUG:
REST_FRAMEWORK.update({
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
})
MESH_USERNAME = "changeme"
MESH_SITE = "https://mesh.example.com"
MESH_TOKEN_KEY = "changeme"
REDIS_HOST = "localhost"

View File

@@ -1,3 +1,7 @@
from rest_framework import permissions
from tacticalrmm.auth import APIAuthentication
def _has_perm(request, perm):
if request.user.is_superuser or (
request.user.role and getattr(request.user.role, "is_superuser")

View File

@@ -15,24 +15,24 @@ EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
AUTH_USER_MODEL = "accounts.User"
# latest release
TRMM_VERSION = "0.8.0"
TRMM_VERSION = "0.8.3"
# bump this version everytime vue code is changed
# to alert user they need to manually refresh their browser
APP_VER = "0.0.142"
APP_VER = "0.0.145"
# https://github.com/wh1te909/rmmagent
LATEST_AGENT_VER = "1.6.0"
LATEST_AGENT_VER = "1.6.2"
MESH_VER = "0.9.15"
MESH_VER = "0.9.16"
NATS_SERVER_VER = "2.3.3"
# for the update script, bump when need to recreate venv or npm install
PIP_VER = "21"
NPM_VER = "21"
NPM_VER = "22"
SETUPTOOLS_VER = "57.4.0"
SETUPTOOLS_VER = "57.5.0"
WHEEL_VER = "0.37.0"
DL_64 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}.exe"
@@ -58,6 +58,21 @@ try:
except ImportError:
pass
REST_FRAMEWORK = {
"DATETIME_FORMAT": "%b-%d-%Y - %H:%M",
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
"DEFAULT_AUTHENTICATION_CLASSES": (
"knox.auth.TokenAuthentication",
"tacticalrmm.auth.APIAuthentication",
),
}
if not "AZPIPELINE" in os.environ:
if not DEBUG: # type: ignore
REST_FRAMEWORK.update(
{"DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",)}
)
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
@@ -207,7 +222,10 @@ if "AZPIPELINE" in os.environ:
REST_FRAMEWORK = {
"DATETIME_FORMAT": "%b-%d-%Y - %H:%M",
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
"DEFAULT_AUTHENTICATION_CLASSES": ("knox.auth.TokenAuthentication",),
"DEFAULT_AUTHENTICATION_CLASSES": (
"knox.auth.TokenAuthentication",
"tacticalrmm.auth.APIAuthentication",
),
"DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",),
}

View File

@@ -265,7 +265,9 @@ def run_nats_api_cmd(mode: str, ids: list[str] = [], timeout: int = 30) -> None:
"dbname": db["NAME"],
}
with tempfile.NamedTemporaryFile() as fp:
with tempfile.NamedTemporaryFile(
dir="/opt/tactical/tmp" if settings.DOCKER_BUILD else None
) as fp:
with open(fp.name, "w") as f:
json.dump(config, f)

View File

@@ -5,6 +5,7 @@ set -e
: "${WORKER_CONNECTIONS:=2048}"
: "${APP_PORT:=80}"
: "${API_PORT:=80}"
: "${API_PROTOCOL:=}" # blank for uwgsi
CERT_PRIV_PATH=${TACTICAL_DIR}/certs/privkey.pem
CERT_PUB_PATH=${TACTICAL_DIR}/certs/fullchain.pem
@@ -37,20 +38,10 @@ server {
location / {
#Using variable to disable start checks
set \$api http://tactical-backend:${API_PORT};
set \$api ${API_PROTOCOL}tactical-backend:${API_PORT};
proxy_pass \$api;
proxy_http_version 1.1;
proxy_cache_bypass \$http_upgrade;
proxy_set_header Upgrade \$http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host \$host;
proxy_set_header X-Real-IP \$remote_addr;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto \$scheme;
proxy_set_header X-Forwarded-Host \$host;
proxy_set_header X-Forwarded-Port \$server_port;
include uwsgi_params;
uwsgi_pass \$api;
}
location /static/ {

View File

@@ -18,8 +18,7 @@ RUN apt-get update && \
apt-get install -y --no-install-recommends gcc libc6-dev && \
rm -rf /var/lib/apt/lists/* && \
pip install --upgrade pip && \
pip install --no-cache-dir setuptools wheel gunicorn && \
sed -i '/uWSGI/d' ${TACTICAL_TMP_DIR}/api/requirements.txt && \
pip install --no-cache-dir setuptools wheel && \
pip install --no-cache-dir -r ${TACTICAL_TMP_DIR}/api/requirements.txt

View File

@@ -36,7 +36,8 @@ if [ "$1" = 'tactical-init' ]; then
mkdir -p ${TACTICAL_DIR}/tmp
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/exe
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/logs
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/log
touch ${TACTICAL_DIR}/api/tacticalrmm/private/log/django_debug.log
until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
echo "waiting for postgresql container to be ready..."
@@ -87,24 +88,6 @@ DATABASES = {
}
}
REST_FRAMEWORK = {
'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'knox.auth.TokenAuthentication',
),
}
if not DEBUG:
REST_FRAMEWORK.update({
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
})
MESH_USERNAME = '${MESH_USER}'
MESH_SITE = 'https://${MESH_HOST}'
MESH_TOKEN_KEY = '${MESH_TOKEN}'
@@ -116,6 +99,28 @@ EOF
echo "${localvars}" > ${TACTICAL_DIR}/api/tacticalrmm/local_settings.py
uwsgiconf="$(cat << EOF
[uwsgi]
chdir = /opt/tactical/api
module = tacticalrmm.wsgi
home = /opt/venv
master = true
processes = 8
threads = 2
enable-threads = true
socket = 0.0.0.0:80
chmod-socket = 660
buffer-size = 65535
vacuum = true
die-on-term = true
max-requests = 2000
EOF
)"
echo "${uwsgiconf}" > ${TACTICAL_DIR}/api/uwsgi.ini
# run migrations and init scripts
python manage.py migrate --no-input
python manage.py collectstatic --no-input
@@ -141,22 +146,7 @@ fi
if [ "$1" = 'tactical-backend' ]; then
check_tactical_ready
# Prepare log files and start outputting logs to stdout
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/logs
touch ${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn.log
touch ${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn-access.log
tail -n 0 -f ${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn*.log &
export DJANGO_SETTINGS_MODULE=tacticalrmm.settings
exec gunicorn tacticalrmm.wsgi:application \
--name tactical-backend \
--bind 0.0.0.0:80 \
--workers 5 \
--log-level=info \
--log-file=${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn.log \
--access-logfile=${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn-access.log \
uwsgi ${TACTICAL_DIR}/api/uwsgi.ini
fi
if [ "$1" = 'tactical-celery' ]; then
@@ -170,7 +160,7 @@ if [ "$1" = 'tactical-celerybeat' ]; then
celery -A tacticalrmm beat -l info
fi
# backend container
# websocket container
if [ "$1" = 'tactical-websockets' ]; then
check_tactical_ready

View File

@@ -3,7 +3,7 @@
set -o errexit
set -o pipefail
# tactical tactical-frontend tactical-nats tactical-nginx
# tactical tactical-frontend tactical-nats tactical-nginx tactical-meshcentral
DOCKER_IMAGES="tactical tactical-frontend tactical-nats tactical-nginx tactical-meshcentral"
cd ..

View File

@@ -0,0 +1,25 @@
# API Access
*Version added: v0.8.3*
API Keys can be created to access any of TacticalRMM's api endpoints, which will bypass 2fa authentication
When creating the key you'll need to choose a user, which will reflect what permissions the key has based on the user's role.
Navigate to Settings > Global Settings > API Keys to generate a key
Headers:
```json
{
"Content-Type": "application/json",
"X-API-KEY": "J57BXCFDA2WBCXH0XTELBR5KAI69CNCZ"
}
```
Example curl request:
```bash
curl https://api.example.com/clients/clients/ -H "X-API-KEY: Y57BXCFAA9WBCXH0XTEL6R5KAK69CNCZ"
```

Binary file not shown.

After

Width:  |  Height:  |  Size: 46 KiB

464
docs/docs/securing_nginx.md Normal file
View File

@@ -0,0 +1,464 @@
# DISCLAIMER
All the settings covered in this document have been tested against Tactical RMM v0.7.2 and v0.8.0.
Before applying these settings in production, if possible, use a pre-production environment so potential disruptions in your own environment and the service that you provide to your clients can be avoided.
**<span style="text-decoration:underline;">Use the contents included in this guide and apply the security settings detailed here at your own discretion.</span>**
# Intro
This section is structured in three main subsections:
* Enabling GeoIP in NGINX config with the purpose of filtering (blocking) web requests based on the country's source IP.
* Enabling anti “bad” bots/referrers in HTTP requests to the NGINX server.
* Compiling and enabling ModSec + OWASP CRS in NGINX server.
Each section can be enabled independently.
# Hardening NGINX settings
## GeoIP Integration in NGINX - Blocking Requests by Country Code
Install required packages and NGINX module for GeoIP:
```
# apt-get install geoip-database libgeoip1 libnginx-mod-http-geoip
```
Verify that the GeoIP database files have been placed in the right location:
```
# ls -lrt /usr/share/GeoIP/
total 10004
-rw-r--r-- 1 root root 8138841 Jan 24 2020 GeoIPv6.dat
-rw-r--r-- 1 root root 2099217 Jan 24 2020 GeoIP.dat
```
Edit NGINX config file (“/etc/nginx/nginx.conf”) and add the following config under the “http {“ block:
```
http {
##
# Basic Settings
##
# Load GeoIP Database
geoip_country /usr/share/GeoIP/GeoIP.dat;
```
The next settings will depend on the desired GeoIP blocking strategy. For “allow by default, deny by exception”, the config would be:
```
http {
##
# Basic Settings
##
# Load GeoIP Database
geoip_country /usr/share/GeoIP/GeoIP.dat;
# map the list of denied countries
map $geoip_country_code $allowed_country {
default yes;
# BLOCKED_COUNTRY_1
COUNTRY_CODE_1 no;
# BLOCKED_COUNTRY_2
COUNTRY_CODE_2 no;
# BLOCKED_COUNTRY_3
COUNTRY_CODE_3 no;
}
```
(The macro can be modified to achieve the “deny by default, allow by exception” approach).
Finally, the following “if” statement needs to be placed in all the vhosts where the GeoIP blocking should take effect, under the “location” section:
```
location / {
root /var/www/rmm/dist;
try_files $uri $uri/ /index.html;
add_header Cache-Control "no-store, no-cache, must-revalidate";
add_header Pragma "no-cache";
# block the country
if ($allowed_country = no) {
return 444;
}
}
```
The HTTP Status = 444 is a good choice for NGINX not “wasting” too many resources in sending back the 4xx code to the client being blocked by GeoIP.
## Blocking “bad bots” and “bad referrers”
Nginx Bad Bot and User-Agent Blocker, Spam Referrer Blocker, Anti DDOS, Bad IP Blocker and Wordpress Theme Detector Blocker
Source:
[https://github.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker](https://github.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker)
Download “install-ngxblocker” to your /usr/local/sbin/directory and make the script executable.
```
sudo wget https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/install-ngxblocker -O /usr/local/sbin/install-ngxblocker
sudo chmod +x /usr/local/sbin/install-ngxblocker
```
**<span style="text-decoration:underline;">(OPTIONAL)</span>** Now run the "install-ngxblocker" script in **DRY-MODE**, which will show you what changes it will make and what files it will download for you. This is only a DRY-RUN, so no changes are being made yet.
The install-ngxblocker downloads all required files including the setup and update scripts.
```
cd /usr/local/sbin
sudo ./install-ngxblocker
```
This will show you output as follows of the changes that will be made (NOTE: this is only a **DRY-RUN** no changes have been made)
```
Checking url: https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/include_filelist.txt
** Dry Run ** | not updating files | run as 'install-ngxblocker -x' to install files.
Creating directory: /etc/nginx/bots.d
REPO = https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master
Downloading [FROM]=> [REPO]/conf.d/globalblacklist.conf [TO]=> /etc/nginx/conf.d/globalblacklist.conf
Downloading [FROM]=> [REPO]/conf.d/botblocker-nginx-settings.conf [TO]=> /etc/nginx/conf.d/botblocker-nginx-settings.conf
REPO = https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master
Downloading [FROM]=> [REPO]/bots.d/blockbots.conf [TO]=> /etc/nginx/bots.d/blockbots.conf
Downloading [FROM]=> [REPO]/bots.d/ddos.conf [TO]=> /etc/nginx/bots.d/ddos.conf
Downloading [FROM]=> [REPO]/bots.d/whitelist-ips.conf [TO]=> /etc/nginx/bots.d/whitelist-ips.conf
Downloading [FROM]=> [REPO]/bots.d/whitelist-domains.conf [TO]=> /etc/nginx/bots.d/whitelist-domains.conf
Downloading [FROM]=> [REPO]/bots.d/blacklist-user-agents.conf [TO]=> /etc/nginx/bots.d/blacklist-user-agents.conf
Downloading [FROM]=> [REPO]/bots.d/blacklist-ips.conf [TO]=> /etc/nginx/bots.d/blacklist-ips.conf
Downloading [FROM]=> [REPO]/bots.d/bad-referrer-words.conf [TO]=> /etc/nginx/bots.d/bad-referrer-words.conf
Downloading [FROM]=> [REPO]/bots.d/custom-bad-referrers.conf [TO]=> /etc/nginx/bots.d/custom-bad-referrers.conf
REPO = https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master
Downloading [FROM]=> [REPO]/setup-ngxblocker [TO]=> /usr/local/sbin/setup-ngxblocker
Downloading [FROM]=> [REPO]/update-ngxblocker [TO]=> /usr/local/sbin/update-ngxblocker
```
Now run the install script with the -x parameter to download all the necessary files from the repository:
```
cd /usr/local/sbin/
sudo ./install-ngxblocker -x
```
This will give you the following output:
```
Checking url: https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/include_filelist.txt
Creating directory: /etc/nginx/bots.d
REPO = https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master
Downloading [FROM]=> [REPO]/conf.d/globalblacklist.conf [TO]=> /etc/nginx/conf.d/globalblacklist.conf...OK
Downloading [FROM]=> [REPO]/conf.d/botblocker-nginx-settings.conf [TO]=> /etc/nginx/conf.d/botblocker-nginx-settings.conf...OK
REPO = https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master
Downloading [FROM]=> [REPO]/bots.d/blockbots.conf [TO]=> /etc/nginx/bots.d/blockbots.conf...OK
Downloading [FROM]=> [REPO]/bots.d/ddos.conf [TO]=> /etc/nginx/bots.d/ddos.conf...OK
Downloading [FROM]=> [REPO]/bots.d/whitelist-ips.conf [TO]=> /etc/nginx/bots.d/whitelist-ips.conf...OK
Downloading [FROM]=> [REPO]/bots.d/whitelist-domains.conf [TO]=> /etc/nginx/bots.d/whitelist-domains.conf...OK
Downloading [FROM]=> [REPO]/bots.d/blacklist-user-agents.conf [TO]=> /etc/nginx/bots.d/blacklist-user-agents.conf...OK
Downloading [FROM]=> [REPO]/bots.d/blacklist-ips.conf [TO]=> /etc/nginx/bots.d/blacklist-ips.conf...OK
Downloading [FROM]=> [REPO]/bots.d/bad-referrer-words.conf [TO]=> /etc/nginx/bots.d/bad-referrer-words.conf...OK
Downloading [FROM]=> [REPO]/bots.d/custom-bad-referrers.conf [TO]=> /etc/nginx/bots.d/custom-bad-referrers.conf...OK
REPO = https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master
Downloading [FROM]=> [REPO]/setup-ngxblocker [TO]=> /usr/local/sbin/setup-ngxblocker...OK
Downloading [FROM]=> [REPO]/update-ngxblocker [TO]=> /usr/local/sbin/update-ngxblocker...OK
```
All the required files have now been downloaded to the correct folders on Nginx for you direct from the repository.
**<span style="text-decoration:underline;">NOTE:</span>** The setup and update scripts can be used, however in this guide the config is done manually. For script execution, refer to the Github page linked above.
Include any public IP addresses that should be whitelisted from bot and referrer analysis/blocking by editing the file “/etc/nginx/bots.d/whitelist-ips.conf”.
Finally, edit every vhost file (“/etc/nginx/sites-enabled/frontend.conf”, “/etc/nginx/sites-enabled/rmm.conf” and “/etc/nginx/sites-enabled/meshcentral.conf”) and place the following include statements under the “server” block:
```
server {
listen 443 ssl;
include /etc/nginx/bots.d/ddos.conf;
include /etc/nginx/bots.d/blockbots.conf;
```
# Enabling ModSec in NGINX
All steps in this section taken from the NGINX blog post “Compiling and Installing ModSecurity for NGINX Open Source”:
[https://www.nginx.com/blog/compiling-and-installing-modsecurity-for-open-source-nginx/](https://www.nginx.com/blog/compiling-and-installing-modsecurity-for-open-source-nginx/)
## Install Prerequisite Packages
The first step is to install the packages required to complete the remaining steps in this tutorial. Run the following command, which is appropriate for a freshly installed Ubuntu/Debian system. The required packages might be different for RHEL/CentOS/Oracle Linux.
```
$ apt-get install -y apt-utils autoconf automake build-essential git libcurl4-openssl-dev libgeoip-dev liblmdb-dev libpcre++-dev libtool libxml2-dev libyajl-dev pkgconf wget zlib1g-dev
```
## Download and Compile the ModSecurity 3.0 Source Code
With the required prerequisite packages installed, the next step is to compile ModSecurity as an NGINX dynamic module. In ModSecurity 3.0's new modular architecture, libmodsecurity is the core component which includes all rules and functionality. The second main component in the architecture is a connector that links libmodsecurity to the web server it is running with. There are separate connectors for NGINX, Apache HTTP Server, and IIS. We cover the NGINX connector in the next section.
To compile libmodsecurity:
Clone the GitHub repository:
```
$ git clone --depth 1 -b v3/master --single-branch https://github.com/SpiderLabs/ModSecurity
```
Change to the ModSecurity directory and compile the source code:
```
$ cd ModSecurity
$ git submodule init
$ git submodule update
$ ./build.sh
$ ./configure
$ make
$ make install
$ cd ..
```
The compilation takes about 15 minutes, depending on the processing power of your system.
Note: It's safe to ignore messages like the following during the build process. Even when they appear, the compilation completes and creates a working object.
```
fatal: No names found, cannot describe anything.
```
## Download the NGINX Connector for ModSecurity and Compile It as a Dynamic Module
Compile the ModSecurity connector for NGINX as a dynamic module for NGINX.
Clone the GitHub repository:
```
$ git clone --depth 1 https://github.com/SpiderLabs/ModSecurity-nginx.git
```
Determine which version of NGINX is running on the host where the ModSecurity module will be loaded:
```
$ nginx -v
nginx version: nginx/1.18.0 (Ubuntu)
```
Download the source code corresponding to the installed version of NGINX (the complete sources are required even though only the dynamic module is being compiled):
```
$ wget http://nginx.org/download/nginx-1.18.0.tar.gz
$ tar zxvf nginx-1.18.0.tar.gz
```
Compile the dynamic module and copy it to the standard directory for modules:
```
$ cd nginx-1.18.0
$ ./configure --with-compat --add-dynamic-module=../ModSecurity-nginx
$ make modules
$ cp objs/ngx_http_modsecurity_module.so /etc/nginx/modules
$ cp objs/ngx_http_modsecurity_module.so /usr/share/nginx/modules/
$ cd ..
```
## Load the NGINX ModSecurity Connector Dynamic Module
Add the following load_module directive to the main (toplevel) context in /etc/nginx/nginx.conf. It instructs NGINX to load the ModSecurity dynamic module when it processes the configuration:
```
load_module modules/ngx_http_modsecurity_module.so;
```
## Configure and Enable ModSecurity
The final step is to enable and test ModSecurity.
Set up the appropriate ModSecurity configuration file. Here we're using the recommended ModSecurity configuration provided by Trustwave SpiderLabs, the corporate sponsors of ModSecurity.
```
$ mkdir /etc/nginx/modsec
$ wget -P /etc/nginx/modsec/ https://raw.githubusercontent.com/SpiderLabs/ModSecurity/v3/master/modsecurity.conf-recommended
$ mv /etc/nginx/modsec/modsecurity.conf-recommended /etc/nginx/modsec/modsecurity.conf
```
To guarantee that ModSecurity can find the unicode.mapping file (distributed in the toplevel ModSecurity directory of the GitHub repo), copy it to /etc/nginx/modsec.
```
$ cp ModSecurity/unicode.mapping /etc/nginx/modsec
```
Change the SecRuleEngine directive in the configuration to change from the default “detection only” mode to actively dropping malicious traffic.
```
#SecRuleEngine DetectionOnly
SecRuleEngine On
```
# Enabling OWASP Core Rule Set
Clone OWASP CRS:
```
$ cd /etc/nginx/modsec
$ git clone https://github.com/coreruleset/coreruleset.git
```
Create CRS setup config file:
```
$ cp /etc/nginx/modsec/coreruleset/crs-setup.conf.example /etc/nginx/modsec/coreruleset/crs-setup.conf
```
Edit config file and enable a paranoia level of 2 (comment out section below and modify the paranoia level from 1 - default to 2):
```
SecAction \
"id:900000,\
phase:1,\
nolog,\
pass,\
t:none,\
setvar:tx.paranoia_level=2"
```
A Paranoia level of 2 is a good combination of security rules to load by the ModSec engine while keeping low the number of false positives.
The OWASP CRS team carried out some tests using BURP against ModSec + OWASP CRS:
![alt_text](images/owasp_burp.png "image_tooltip")
Create ModSecurity base config file (“/etc/nginx/modsec/modsec-base-cfg.conf”) and include the following lines (the order is important)`:`
```
Include /etc/nginx/modsec/modsecurity.conf
Include /etc/nginx/modsec/coreruleset/crs-setup.conf
Include /etc/nginx/modsec/coreruleset/rules/*.conf
```
Enable ModSec in all NGINX enabled sites:
“/etc/nginx/sites-enabled/frontend.conf”, “/etc/nginx/sites-enabled/rmm.conf” and “/etc/nginx/sites-enabled/meshcentral.conf”:
```
server {
modsecurity on;
modsecurity_rules_file /etc/nginx/modsec/modsec-base-cfg.conf;
…………………..
…………………..
```
Tactical RMM custom rules:
* Access to the admin UI (front-end): We apply the “deny by default, allow by exception” principle, whereby only a set of predefined public IPs should be allowed to access the UI
* API and Meshcentral: RMM agents and RMM UI (as referrer while an admin session is active) make web calls that get blocked by the OWASP CRS, specifically PUT, POST and PATCH methods. These three methods can be “whitelisted” when the requested URI matches legitimate requests.
* Connection to Meshcentral during Tactical agent install.
Create a .conf file under “/etc/nginx/modsec/coreruleset/rules” named “RMM-RULES.conf”, for example, with the following content:
```
#ADMIN UI/FRONTEND ACCESS - DENY BY DEFAULT, ALLOW BY EXCEPTION
SecRule SERVER_NAME "rmm.yourdomain.com" "id:1001,phase:1,nolog,msg:'Remote IP Not allowed',deny,chain"
### ALLOWED PUBLIC IP 1 #########
SecRule REMOTE_ADDR "!@eq IP1" chain
### ALLOWED PUBLIC IP 2 #########
SecRule REMOTE_ADDR "!@eq IP2" "t:none"
#API AND MESHCENTRAL - WHITELIST PUT, PATCH AND POST METHODS BY REQUESTED URI
SecRule REQUEST_URI "@beginsWith /api/v3/checkin" "id:1002,phase:1,t:none,nolog,allow,chain"
SecRule REQUEST_METHOD "PUT|PATCH" "t:none"
SecRule REQUEST_URI "@beginsWith /api/v3/checkrunner" "chain,id:'1003',phase:1,t:none,nolog,allow"
SecRule REQUEST_METHOD "PATCH" "t:none"
SecRule REQUEST_URI "@beginsWith /alerts/alerts" "chain,id:'1004',phase:1,t:none,nolog,allow"
SecRule REQUEST_METHOD "PATCH" "t:none"
SecRule REQUEST_URI "@beginsWith /agents/listagents" "chain,id:'1005',phase:1,t:none,nolog,allow"
SecRule REQUEST_METHOD "PATCH" "t:none"
SecRule REQUEST_URI "@beginsWith /api/v3/sysinfo" "chain,id:'1006',phase:1,t:none,nolog,allow"
SecRule REQUEST_METHOD "PATCH" "t:none"
SecRule REQUEST_URI "@beginsWith /api/v3/winupdates" "chain,id:'1007',phase:1,t:none,nolog,allow"
SecRule REQUEST_METHOD "POST"
##REQUIRED FOR MANAGEMENT ACTIONS FROM ADMIN/FRONT-END UI. WHITELIST BY REFERRER's URL
SecRule REQUEST_HEADERS:REFERER "https://rmm.yourdomain.com/" "id:1008,phase:1,nolog,ctl:ruleRemoveById=920170,allow"
#REQUIRED FOR NEW CLIENTS TO CONNECT TO MESH SERVICE WHILE INSTALLING THE AGENT
SecRule REQUEST_URI "@beginsWith /api/v3/meshexe" "id:1009,phase:1,nolog,ctl:ruleRemoveById=920170,allow"
### NOTE ON RULE ID = 920170 (WHITELISTED IN CASES ABOVE FOR TACTICAL RMM) ###
# Do not accept GET or HEAD requests with bodies
# HTTP standard allows GET requests to have a body but this
# feature is not used in real life. Attackers could try to force
# a request body on an unsuspecting web applications.
#
# -=[ Rule Logic ]=-
# This is a chained rule that first checks the Request Method. If it is a
# GET or HEAD method, then it checks for the existence of a Content-Length
# header. If the header exists and its payload is either not a 0 digit or not
# empty, then it will match.
#
# -=[ References ]=-
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
###
```

View File

@@ -1,5 +1,16 @@
# Troubleshooting
#### Problems after new install
In the very unlikely event you have issues after install please wipe the box and install again (following all the steps including downloading the install script but not running it) use the following command which will log the install progress and if you continue to have issues will assist with support of the installation.
```bash
bash -x install.sh 2>&1 | tee install.log
```
!!!note
Logging of installs isn't desirable as it logs extremely sensitive information, which is why this isn't done by default! **Do not** post the raw log publicly; only provide it if requested, and then by DM only. Authorized users in Discord are: @BurningTimes#1938 @sadnub#6992 @dinger1986#1734 @silversword#9652
#### "Bad credentials" error when trying to login to the Web UI
If you are sure you are using the correct credentials and still getting a "bad credentials" error, open your browser's dev tools (ctrl + shift + j on chrome) and check the Console tab to see the real error.

View File

@@ -1,4 +1,4 @@
# Unsupported Reference scripts
# Unsupported Reference Scripts
!!!note
These are not supported scripts/configurations by Tactical RMM, but it's provided here for your reference.
@@ -164,4 +164,366 @@ sudo echo "${tacticalfail2banjail}" > /etc/fail2ban/jail.d/tacticalrmm.local
```bash
sudo systemctl restart fail2ban
```
## Using purchased SSL certs instead of LetsEncrypt wildcards
Credit to [@dinger1986](https://github.com/dinger1986)
How to change certs used by Tactical RMM to purchased ones (this can be a wildcard cert).
You need to add the certificate private key and public keys to the following files:
`/etc/nginx/sites-available/rmm.conf`
`/etc/nginx/sites-available/meshcentral.conf`
`/etc/nginx/sites-available/frontend.conf`
`/rmm/api/tacticalrmm/tacticalrmm/local_settings.py`
1. create a new folder for certs and allow tactical user permissions (assumed to be tactical)
sudo mkdir /certs
sudo chown -R tactical:tactical /certs
2. Now move your certs into that folder.
3. Open the api file and add the api certificate, or if it's a wildcard the directory should be `/certs/yourdomain.com/`
sudo nano /etc/nginx/sites-available/rmm.conf
replace
ssl_certificate /etc/letsencrypt/live/yourdomain.com/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/yourdomain.com/privkey.pem;
with
ssl_certificate /certs/api.yourdomain.com/fullchain.pem;
ssl_certificate_key /certs/api.yourdomain.com/privkey.pem;
4. Repeat the process for
/etc/nginx/sites-available/meshcentral.conf
/etc/nginx/sites-available/frontend.conf
but change api. to: mesh. and rmm. respectively.
5. Add the following to the last lines of `/rmm/api/tacticalrmm/tacticalrmm/local_settings.py`
nano /rmm/api/tacticalrmm/tacticalrmm/local_settings.py
add
CERT_FILE = "/certs/api.yourdomain.com/fullchain.pem"
KEY_FILE = "/certs/api.yourdomain.com/privkey.pem"
6. Regenerate Nats Conf
cd /rmm/api/tacticalrmm
source ../env/bin/activate
python manage.py reload_nats
7. Restart services
sudo systemctl restart rmm celery celerybeat nginx nats natsapi
## Use certbot to do acme challenge over http
The standard SSL cert process in Tactical uses a [DNS challenge](https://letsencrypt.org/docs/challenge-types/#dns-01-challenge) that requires dns txt files to be updated with every run.
The below script uses [http challenge](https://letsencrypt.org/docs/challenge-types/#http-01-challenge) on the 3 separate ssl certs, one for each subdomain: rmm, api, mesh. They still have the same 3 month expiry. Restart the Tactical RMM server about every 2.5 months (80 days) for auto-renewed certs to become active.
!!!note
Your Tactical RMM server will need to have TCP Port: 80 exposed to the internet
```bash
#!/bin/bash
###Set colours same as Tactical RMM install and Update
YELLOW='\033[1;33m'
GREEN='\033[0;32m'
RED='\033[0;31m'
NC='\033[0m'
### Ubuntu 20.04 Check
UBU20=$(grep 20.04 "/etc/"*"release")
if ! [[ $UBU20 ]]; then
echo -ne "\033[0;31mThis script will only work on Ubuntu 20.04\e[0m\n"
exit 1
fi
cls() {
printf "\033c"
}
print_green() {
printf >&2 "${GREEN}%0.s-${NC}" {1..80}
printf >&2 "\n"
printf >&2 "${GREEN}${1}${NC}\n"
printf >&2 "${GREEN}%0.s-${NC}" {1..80}
printf >&2 "\n"
}
cls
### Set variables for domains
while [[ $rmmdomain != *[.]*[.]* ]]
do
echo -ne "${YELLOW}Enter the subdomain used for the backend (e.g. api.example.com)${NC}: "
read rmmdomain
done
while [[ $frontenddomain != *[.]*[.]* ]]
do
echo -ne "${YELLOW}Enter the subdomain used for the frontend (e.g. rmm.example.com)${NC}: "
read frontenddomain
done
while [[ $meshdomain != *[.]*[.]* ]]
do
echo -ne "${YELLOW}Enter the subdomain used for meshcentral (e.g. mesh.example.com)${NC}: "
read meshdomain
done
echo -ne "${YELLOW}Enter the current root domain (e.g. example.com or example.co.uk)${NC}: "
read rootdomain
### Setup Certificate Variables
CERT_PRIV_KEY=/etc/letsencrypt/live/${rootdomain}/privkey.pem
CERT_PUB_KEY=/etc/letsencrypt/live/${rootdomain}/fullchain.pem
### Make Letsencrypt directories
sudo mkdir /var/www/letsencrypt
sudo mkdir /var/www/letsencrypt/.mesh
sudo mkdir /var/www/letsencrypt/.rmm
sudo mkdir /var/www/letsencrypt/.api
### Remove config files for nginx
sudo rm /etc/nginx/sites-available/rmm.conf
sudo rm /etc/nginx/sites-available/meshcentral.conf
sudo rm /etc/nginx/sites-available/frontend.conf
sudo rm /etc/nginx/sites-enabled/rmm.conf
sudo rm /etc/nginx/sites-enabled/meshcentral.conf
sudo rm /etc/nginx/sites-enabled/frontend.conf
### Setup tactical nginx config files for letsencrypt
nginxrmm="$(cat << EOF
server_tokens off;
upstream tacticalrmm {
server unix:////rmm/api/tacticalrmm/tacticalrmm.sock;
}
map \$http_user_agent \$ignore_ua {
"~python-requests.*" 0;
"~go-resty.*" 0;
default 1;
}
server {
listen 80;
server_name ${rmmdomain};
location /.well-known/acme-challenge/ {
root /var/www/letsencrypt/.api/;}
location / {
return 301 https://\$server_name\$request_uri;}
}
server {
listen 443 ssl;
server_name ${rmmdomain};
client_max_body_size 300M;
access_log /rmm/api/tacticalrmm/tacticalrmm/private/log/access.log;
error_log /rmm/api/tacticalrmm/tacticalrmm/private/log/error.log;
ssl_certificate ${CERT_PUB_KEY};
ssl_certificate_key ${CERT_PRIV_KEY};
ssl_ciphers 'ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384';
location /static/ {
root /rmm/api/tacticalrmm;
}
location /private/ {
internal;
add_header "Access-Control-Allow-Origin" "https://${frontenddomain}";
alias /rmm/api/tacticalrmm/tacticalrmm/private/;
}
location ~ ^/ws/ {
proxy_pass http://unix:/rmm/daphne.sock;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_redirect off;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Host $server_name;
}
location /saltscripts/ {
internal;
add_header "Access-Control-Allow-Origin" "https://${frontenddomain}";
alias /srv/salt/scripts/userdefined/;
}
location /builtin/ {
internal;
add_header "Access-Control-Allow-Origin" "https://${frontenddomain}";
alias /srv/salt/scripts/;
}
location ~ ^/(natsapi) {
allow 127.0.0.1;
deny all;
uwsgi_pass tacticalrmm;
include /etc/nginx/uwsgi_params;
uwsgi_read_timeout 500s;
uwsgi_ignore_client_abort on;
}
location / {
uwsgi_pass tacticalrmm;
include /etc/nginx/uwsgi_params;
uwsgi_read_timeout 9999s;
uwsgi_ignore_client_abort on;
}
}
EOF
)"
echo "${nginxrmm}" | sudo tee /etc/nginx/sites-available/rmm.conf > /dev/null
nginxmesh="$(cat << EOF
server {
listen 80;
server_name ${meshdomain};
location /.well-known/acme-challenge/ {
root /var/www/letsencrypt/.mesh/;}
location / {
return 301 https://\$server_name\$request_uri;}
}
server {
listen 443 ssl;
proxy_send_timeout 330s;
proxy_read_timeout 330s;
server_name ${meshdomain};
ssl_certificate ${CERT_PUB_KEY};
ssl_certificate_key ${CERT_PRIV_KEY};
ssl_session_cache shared:WEBSSL:10m;
ssl_ciphers HIGH:!aNULL:!MD5;
ssl_prefer_server_ciphers on;
location / {
proxy_pass http://127.0.0.1:4430/;
proxy_http_version 1.1;
proxy_set_header Host \$host;
proxy_set_header Upgrade \$http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header X-Forwarded-Host \$host:\$server_port;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto \$scheme;
}
}
EOF
)"
echo "${nginxmesh}" | sudo tee /etc/nginx/sites-available/meshcentral.conf > /dev/null
nginxfrontend="$(cat << EOF
server {
server_name ${frontenddomain};
charset utf-8;
location / {
root /var/www/rmm/dist;
try_files \$uri \$uri/ /index.html;
add_header Cache-Control "no-store, no-cache, must-revalidate";
add_header Pragma "no-cache";
}
error_log /var/log/nginx/frontend-error.log;
access_log /var/log/nginx/frontend-access.log;
listen 443 ssl;
ssl_certificate ${CERT_PUB_KEY};
ssl_certificate_key ${CERT_PRIV_KEY};
ssl_ciphers 'ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384';
}
server {
listen 80;
server_name ${frontenddomain};
location /.well-known/acme-challenge/ {
root /var/www/letsencrypt/.rmm/;}
location / {
return 301 https://\$host\$request_uri;}
}
EOF
)"
echo "${nginxfrontend}" | sudo tee /etc/nginx/sites-available/frontend.conf > /dev/null
### Relink nginx config files
sudo ln -s /etc/nginx/sites-available/rmm.conf /etc/nginx/sites-enabled/rmm.conf
sudo ln -s /etc/nginx/sites-available/meshcentral.conf /etc/nginx/sites-enabled/meshcentral.conf
sudo ln -s /etc/nginx/sites-available/frontend.conf /etc/nginx/sites-enabled/frontend.conf
### Restart nginx
sudo systemctl restart nginx
### Get letsencrypt Certs
sudo letsencrypt certonly --webroot -w /var/www/letsencrypt/.mesh/ -d ${meshdomain}
sudo letsencrypt certonly --webroot -w /var/www/letsencrypt/.rmm/ -d ${frontenddomain}
sudo letsencrypt certonly --webroot -w /var/www/letsencrypt/.api/ -d ${rmmdomain}
### Ensure letsencrypt Permissions are correct
sudo chown ${USER}:${USER} -R /etc/letsencrypt
sudo chmod 775 -R /etc/letsencrypt
### Set variables for new certs
CERT_PRIV_KEY_API=/etc/letsencrypt/live/${rmmdomain}/privkey.pem
CERT_PUB_KEY_API=/etc/letsencrypt/live/${rmmdomain}/fullchain.pem
CERT_PRIV_KEY_RMM=/etc/letsencrypt/live/${frontenddomain}/privkey.pem
CERT_PUB_KEY_RMM=/etc/letsencrypt/live/${frontenddomain}/fullchain.pem
CERT_PRIV_KEY_MESH=/etc/letsencrypt/live/${meshdomain}/privkey.pem
CERT_PUB_KEY_MESH=/etc/letsencrypt/live/${meshdomain}/fullchain.pem
### Replace certs in files
rmmlocalsettings="$(cat << EOF
CERT_FILE = "${CERT_PUB_KEY_API}"
KEY_FILE = "${CERT_PRIV_KEY_API}"
EOF
)"
echo "${rmmlocalsettings}" | tee --append /rmm/api/tacticalrmm/tacticalrmm/local_settings.py > /dev/null
sudo sed -i "s|${CERT_PRIV_KEY}|${CERT_PRIV_KEY_API}|g" /etc/nginx/sites-available/rmm.conf
sudo sed -i "s|${CERT_PUB_KEY}|${CERT_PUB_KEY_API}|g" /etc/nginx/sites-available/rmm.conf
sudo sed -i "s|${CERT_PRIV_KEY}|${CERT_PRIV_KEY_MESH}|g" /etc/nginx/sites-available/meshcentral.conf
sudo sed -i "s|${CERT_PUB_KEY}|${CERT_PUB_KEY_MESH}|g" /etc/nginx/sites-available/meshcentral.conf
sudo sed -i "s|${CERT_PRIV_KEY}|${CERT_PRIV_KEY_RMM}|g" /etc/nginx/sites-available/frontend.conf
sudo sed -i "s|${CERT_PUB_KEY}|${CERT_PUB_KEY_RMM}|g" /etc/nginx/sites-available/frontend.conf
### Remove Wildcard Cert
rm -r /etc/letsencrypt/live/${rootdomain}/
rm -r /etc/letsencrypt/archive/${rootdomain}/
rm /etc/letsencrypt/renewal/${rootdomain}.conf
### Regenerate Nats Conf
cd /rmm/api/tacticalrmm
source ../env/bin/activate
python manage.py reload_nats
### Restart services
for i in rmm celery celerybeat nginx nats natsapi
do
printf >&2 "${GREEN}Restarting ${i} service...${NC}\n"
sudo systemctl restart ${i}
done
###Renew certs can be done by sudo letsencrypt renew (this should automatically be in /etc/cron.d/certbot)
```

View File

@@ -23,6 +23,7 @@ nav:
- "Scripting": functions/scripting.md
- "URL Actions": functions/url_actions.md
- "User Interface Preferences": functions/user_ui.md
- "API Access": functions/api.md
- "Examples": functions/examples.md
- "Database Maintenance": functions/database_maintenance.md
- Backup: backup.md
@@ -38,10 +39,12 @@ nav:
- "Grafana": 3rdparty_grafana.md
- "TeamViewer": 3rdparty_teamviewer.md
- Tips n' Tricks: tipsntricks.md
- Securing NGINX: securing_nginx.md
- Contributing:
- "Contributing to Docs": contributing.md
- "Contributing using VSCode": contributing_using_vscode.md
- "Contributing to Community Scripts": contributing_community_scripts.md
- "Contributing using VSCode": contributing_using_vscode.md
- "Contributing using Docker": contributing_using_docker.md
- License: license.md
site_description: "A remote monitoring and management tool"
site_author: "wh1te909"

View File

@@ -1,6 +1,6 @@
#!/bin/bash
SCRIPT_VERSION="52"
SCRIPT_VERSION="53"
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/install.sh'
sudo apt install -y curl wget dirmngr gnupg lsb-release
@@ -324,24 +324,6 @@ DATABASES = {
}
}
REST_FRAMEWORK = {
'DATETIME_FORMAT': "%b-%d-%Y - %H:%M",
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'knox.auth.TokenAuthentication',
),
}
if not DEBUG:
REST_FRAMEWORK.update({
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
})
MESH_USERNAME = "${meshusername}"
MESH_SITE = "https://${meshdomain}"
REDIS_HOST = "localhost"

View File

@@ -0,0 +1,9 @@
rem Block Win11 upgrade
reg add HKLM\SOFTWARE\Policies\Microsoft\Windows\WindowsUpdate /f /v TargetReleaseVersion /t REG_DWORD /d 1
reg add HKLM\SOFTWARE\Policies\Microsoft\Windows\WindowsUpdate /f /v TargetReleaseVersionInfo /t REG_SZ /d 21H2
rem classic start menu and left side settings:
rem Fix: the value name must be passed with /v, not appended to the key path,
rem otherwise reg creates a bogus subkey and sets its (Default) value instead.
rem /f added to match the first two commands (no interactive overwrite prompt).
reg add HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Explorer\Advanced /f /v Start_ShowClassicMode /t REG_DWORD /d 1
reg add HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Explorer\Advanced /f /v TaskbarAl /t REG_DWORD /d 0

View File

@@ -0,0 +1,302 @@
<#
From https://smsagent.blog/2018/08/15/create-disk-usage-reports-with-powershell-and-wiztree/
To use the script, simply download the WizTree Portable app, extract the WizTree64.exe and place it in the same location as the script (assuming 64-bit OS). Set the run location in the script (ie $PSScriptRoot if calling the script, or the directory location if running in the ISE), the temporary location where it can create files, and the server share where you want to copy the reports to. Then just run the script in admin context.
#>
# Script to export html and csv reports of file and directory content on the system drive
# Use to identify large files/directories for disk space cleanup
# Uses WizTree portable to quickly retrieve file and directory sizes from the Master File Table on disk
# Download and extract the WizTree64.exe and place in the same directory as this script
#
# FIX(review): this copy had every parameter dash ("-Format", "-Path", "-Skip", ...)
# and the arithmetic "-" operators stripped (likely by a copy/paste or render step),
# which made it unrunnable. They are restored below.

# Set the running location
$RunLocation = $PSScriptRoot
#$RunLocation = "C:\temp"
$TempLocation = "C:\temp"
# Set Target share to copy the reports to
$TargetRoot = "\\server-01\sharename\DirectorySizeInfo"
# Free disk space thresholds (percentages) for summary report
$script:Thresholds = @{}
$Thresholds.Warning = 80
$Thresholds.Critical = 90

# Custom function to exit with a specific code
function ExitWithCode {
    param
    (
        $exitcode
    )
    $host.SetShouldExit($exitcode)
    exit
}

# Function to set the progress bar colour based on the the threshold value in the summary report
function Set-PercentageColour {
    param(
        [int]$Value
    )
    If ($Value -lt $Thresholds.Warning) {
        $Hex = "#00ff00" # Green
    }
    If ($Value -ge $Thresholds.Warning -and $Value -lt $Thresholds.Critical) {
        $Hex = "#ff9900" # Amber
    }
    If ($Value -ge $Thresholds.Critical) {
        $Hex = "#FF0000" # Red
    }
    Return $Hex
}

# Define Html CSS style
$Style = @"
<style>
table {
border-collapse: collapse;
}
td, th {
border: 1px solid #ddd;
padding: 8px;
}
th {
padding-top: 12px;
padding-bottom: 12px;
text-align: left;
background-color: #4286f4;
color: white;
}
</style>
"@

# Set the filenames of WizTree csv's
$FilesCSV = "Files_$(Get-Date -Format 'yyyyMMdd_hhmmss').csv"
$FoldersCSV = "Folders_$(Get-Date -Format 'yyyyMMdd_hhmmss').csv"
# Set the filenames of customised csv's
$ExportedFilesCSV = "Exported_Files_$(Get-Date -Format 'yyyyMMdd_hhmmss').csv"
$ExportedFoldersCSV = "Exported_Folders_$(Get-Date -Format 'yyyyMMdd_hhmmss').csv"
# Set the filenames of html reports
$ExportedFilesHTML = "Largest_Files_$(Get-Date -Format 'yyyyMMdd_hhmmss').html"
$ExportedFoldersHTML = "Largest_Folders_$(Get-Date -Format 'yyyyMMdd_hhmmss').html"
$SummaryHTMLReport = "Disk_Usage_Summary_$(Get-Date -Format 'yyyyMMdd_hhmmss').html"

# Run the WizTree portable app — once exporting only files, once only folders.
# NOTE(review): "/admin 1" restored as "/admin=1" to match WizTree's documented
# switch syntax (and the companion script below) — confirm against WizTree docs.
Start-Process -FilePath "$RunLocation\WizTree64.exe" -ArgumentList """$Env:SystemDrive"" /export=""$TempLocation\$FilesCSV"" /admin=1 /sortby=2 /exportfolders=0" -Verb runas -Wait
Start-Process -FilePath "$RunLocation\WizTree64.exe" -ArgumentList """$Env:SystemDrive"" /export=""$TempLocation\$FoldersCSV"" /admin=1 /sortby=2 /exportfiles=0" -Verb runas -Wait

#region Files
# Remove the first 2 rows from the CSVs to leave just the relevant data
$CSVContent = Get-Content -Path $TempLocation\$FilesCSV -ReadCount 0
$CSVContent = $CSVContent | Select -Skip 1
$CSVContent = $CSVContent | Select -Skip 1
# Create a table to store the results
$Table = [System.Data.DataTable]::new("Directory Structure")
[void]$Table.Columns.Add([System.Data.DataColumn]::new("Name", [System.String]))
[void]$Table.Columns.Add([System.Data.DataColumn]::new("Size (Bytes)", [System.Int64]))
[void]$Table.Columns.Add([System.Data.DataColumn]::new("Size (KB)", [System.Decimal]))
[void]$Table.Columns.Add([System.Data.DataColumn]::new("Size (MB)", [System.Decimal]))
[void]$Table.Columns.Add([System.Data.DataColumn]::new("Size (GB)", [System.Decimal]))
# Populate the table from the CSV data
Foreach ($csvrow in $CSVContent) {
    $Content = $csvrow.split(',')
    [void]$Table.rows.Add(($Content[0].Replace('"', '')), $Content[2], ([math]::Round(($Content[2] / 1KB), 2)), ([math]::Round(($Content[2] / 1MB), 2)), ([math]::Round(($Content[2] / 1GB), 2)))
}
# Export the table to a new CSV
$Table | Sort 'Size (Bytes)' -Descending | Export-CSV -Path $TempLocation\$ExportedFilesCSV -NoTypeInformation -UseCulture
# Export the largest 100 results into html format
$Table |
Sort 'Size (Bytes)' -Descending |
Select -First 100 |
ConvertTo-Html -Property 'Name', 'Size (Bytes)', 'Size (KB)', 'Size (MB)', 'Size (GB)' -Head $style -Body "<h2>100 largest files on $env:COMPUTERNAME</h2>" -CssUri "http://www.w3schools.com/lib/w3.css" |
Out-String | Out-File $TempLocation\$ExportedFilesHTML
#endregion

#region Folders
# Remove the first 2 rows from the CSVs to leave just the relevant data
$CSVContent = Get-Content -Path $TempLocation\$FoldersCSV -ReadCount 0
$CSVContent = $CSVContent | Select -Skip 1
$CSVContent = $CSVContent | Select -Skip 1
# Create a table to store the results
$Table = [System.Data.DataTable]::new("Directory Structure")
[void]$Table.Columns.Add([System.Data.DataColumn]::new("Name", [System.String]))
[void]$Table.Columns.Add([System.Data.DataColumn]::new("Size (Bytes)", [System.Int64]))
[void]$Table.Columns.Add([System.Data.DataColumn]::new("Size (KB)", [System.Decimal]))
[void]$Table.Columns.Add([System.Data.DataColumn]::new("Size (MB)", [System.Decimal]))
[void]$Table.Columns.Add([System.Data.DataColumn]::new("Size (GB)", [System.Decimal]))
[void]$Table.Columns.Add([System.Data.DataColumn]::new("Files", [System.String]))
[void]$Table.Columns.Add([System.Data.DataColumn]::new("Folders", [System.String]))
# Populate the table from the CSV data
Foreach ($csvrow in $CSVContent) {
    $Content = $csvrow.split(',')
    [void]$Table.rows.Add($($Content[0].Replace('"', '')), $Content[2], ([math]::Round(($Content[2] / 1KB), 2)), ([math]::Round(($Content[2] / 1MB), 2)), ([math]::Round(($Content[2] / 1GB), 2)), $Content[5], $Content[6])
}
# Export the table to a new CSV
$Table | Sort 'Size (Bytes)' -Descending | Export-CSV -Path $TempLocation\$ExportedFoldersCSV -NoTypeInformation -UseCulture
# Export the largest 100 results into html format
$Table |
Sort 'Size (Bytes)' -Descending |
Select -First 100 |
ConvertTo-Html -Property 'Name', 'Size (Bytes)', 'Size (KB)', 'Size (MB)', 'Size (GB)', 'Files', 'Folders' -Head $style -Body "<h2>100 largest directories on $env:COMPUTERNAME</h2>" -CssUri "http://www.w3schools.com/lib/w3.css" |
Out-String | Out-File $TempLocation\$ExportedFoldersHTML
#endregion

#region Create HTML disk usage summary report
# Get system drive data
$WMIDiskInfo = Get-CimInstance -ClassName Win32_Volume -Property Capacity, FreeSpace, DriveLetter | Where { $_.DriveLetter -eq $env:SystemDrive } | Select Capacity, FreeSpace, DriveLetter
$DiskInfo = [pscustomobject]@{
    DriveLetter = $WMIDiskInfo.DriveLetter
    'Capacity (GB)' = [math]::Round(($WMIDiskInfo.Capacity / 1GB), 2)
    'FreeSpace (GB)' = [math]::Round(($WMIDiskInfo.FreeSpace / 1GB), 2)
    'UsedSpace (GB)' = [math]::Round((($WMIDiskInfo.Capacity / 1GB) - ($WMIDiskInfo.FreeSpace / 1GB)), 2)
    'Percent Free' = [math]::Round(($WMIDiskInfo.FreeSpace * 100 / $WMIDiskInfo.Capacity), 2)
    'Percent Used' = [math]::Round((($WMIDiskInfo.Capacity - $WMIDiskInfo.FreeSpace) * 100 / $WMIDiskInfo.Capacity), 2)
}
# Create html header (the "&gt;" artefact on the stylesheet link is fixed to ">")
$html = @"
<!DOCTYPE html>
<html>
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="stylesheet" href="http://www.w3schools.com/lib/w3.css">
<body>
"@
# Set html — usage bar: used-space cell sized by percent used, filler cell by the
# remainder. NOTE(review): the filler width was garbled; restored as (100 - used).
$html = $html + @"
<h2>Disk Space Usage for Drive $($DiskInfo.DriveLetter) on $env:COMPUTERNAME</h2>
<table cellpadding="0" cellspacing="0" width="700">
<tr>
<td style="background-color:$(Set-PercentageColour -Value $($DiskInfo.'Percent Used'));padding:10px;color:#ffffff;" width="$($DiskInfo.'Percent Used')%">
$($DiskInfo.'UsedSpace (GB)') GB ($($DiskInfo.'Percent Used') %)
</td>
<td style="background-color:#eeeeee;padding-top:10px;padding-bottom:10px;color:#333333;" width="$(100 - $DiskInfo.'Percent Used')%">
</td>
</tr>
</table>
<table cellpadding="0" cellspacing="0" width="700">
<tr>
<td style="padding:5px;" width="80%">
Capacity: $($DiskInfo.'Capacity (GB)') GB
</td>
</tr>
<tr>
<td style="padding:5px;" width="80%">
FreeSpace: $($DiskInfo.'FreeSpace (GB)') GB
</td>
</tr>
<tr>
<td style="padding:5px;" width="80%">
Percent Free: $($DiskInfo.'Percent Free') %
</td>
</tr>
</table>
"@
# Warn when less than 20 GB free (W10 readiness threshold)
If ($DiskInfo.'FreeSpace (GB)' -lt 20) {
    $html = $html + @"
<table cellpadding="0" cellspacing="0" width="700">
<tr>
<td style="padding:5px;color:red;font-weight:bold" width="80%">
You need to free $(20 - $DiskInfo.'FreeSpace (GB)') GB on this disk to pass the W10 readiness check!
</td>
</tr>
</table>
"@
}
# Close html document
$html = $html + @"
</body>
</html>
"@
# Export to file
$html |
Out-string |
Out-File $TempLocation\$SummaryHTMLReport
#endregion

#region Copy files to share
# Create a subfolder with computername if doesn't exist
If (!(Test-Path $TargetRoot\$env:COMPUTERNAME)) {
    $null = New-Item -Path $TargetRoot -Name $env:COMPUTERNAME -ItemType Directory
}
# Create a subdirectory with current date-time
$DateString = ((Get-Date).ToUniversalTime() | get-date -Format "yyyy-MM-dd_HH-mm-ss").ToString()
If (!(Test-Path $TargetRoot\$env:COMPUTERNAME\$DateString)) {
    $null = New-Item -Path $TargetRoot\$env:COMPUTERNAME -Name $DateString -ItemType Directory
}
# Set final target location
$TargetLocation = "$TargetRoot\$env:COMPUTERNAME\$DateString"
# Copy files
$Files = @(
    $ExportedFilesCSV
    $ExportedFoldersCSV
    $ExportedFilesHTML
    $ExportedFoldersHTML
    $SummaryHTMLReport
)
Try {
    Robocopy $TempLocation $TargetLocation $Files /R:10 /W:5 /NP
}
Catch {}
#endregion

# Cleanup temp files
$Files = @(
    $FilesCSV
    $FoldersCSV
    $ExportedFilesCSV
    $ExportedFoldersCSV
    $ExportedFilesHTML
    $ExportedFoldersHTML
    $SummaryHTMLReport
)
Foreach ($file in $files) {
    Remove-Item -Path $TempLocation\$file -Force
}
# Force a code 0 on exit, in case of some non-terminating error.
ExitWithCode 0

View File

@@ -0,0 +1,17 @@
# extract WizTree
Expand-Archive C:\temp\wiztree_3_26_portable.zip -DestinationPath C:\temp\wiztree
# run wiztree.exe against provided drive/path
# generates diskusage.csv file and uploads to asset, deletes local file after upload
# NOTE(review): $scanpath and $subdomain are assumed to be injected by the
# RMM/Syncro script-variable mechanism — confirm they are set before this runs.
# If 32-bit
if ([System.IntPtr]::Size -eq 4) {
C:\temp\wiztree\wiztree.exe "$scanpath" /export="c:\temp\wiztree\diskusage.csv" /admin=1 /exportfolders=1 /exportfiles=0 /sortby=2 | Out-Null
}
else {
C:\temp\wiztree\wiztree64.exe "$scanpath" /export="c:\temp\wiztree\diskusage.csv" /admin=1 /exportfolders=1 /exportfiles=0 /sortby=2 | Out-Null
}
# This will upload the file to Syncro and attach it to the Asset.
Upload-File -Subdomain "$subdomain" -FilePath "C:\temp\wiztree\diskusage.csv"
# Delete local file after upload
Remove-Item -Path "C:\temp\wiztree\diskusage.csv" -Force

View File

@@ -0,0 +1,18 @@
# Reads the SMART failure-prediction flag from each disk's storage driver and
# exits 1 if any drive predicts failure, 0 otherwise.
$ErrorActionPreference = 'silentlycontinue'
$smartst = (Get-WmiObject -namespace root\wmi -class MSStorageDriver_FailurePredictStatus).PredictFailure
# Fix: the original used "=" (assignment) instead of a comparison operator, so
# the "no failures" branch always ran regardless of the driver's answer.
# PredictFailure is a boolean (one entry per disk), so test whether any disk
# reports $true.
if ($smartst -contains $true) {
    Write-Output "There are SMART Failures detected"
    exit 1
}
else {
    Write-Output "Theres no SMART Failures predicted"
    exit 0
}

View File

@@ -0,0 +1,129 @@
# Disk-health check: decodes raw SMART attribute data for every physical disk
# and exits 2 with a report if any attribute crosses its warning threshold,
# 1 if SMART data could not be read, 0 if everything looks healthy.
# If this is a virtual machine, we don't need to continue
$Computer = Get-CimInstance -ClassName 'Win32_ComputerSystem'
if ($Computer.Model -like 'Virtual*') {
exit
}
$disks = (Get-CimInstance -Namespace 'Root\WMI' -ClassName 'MSStorageDriver_FailurePredictStatus' |
Select-Object 'InstanceName')
$Warnings = @()
foreach ($disk in $disks.InstanceName) {
# Retrieve SMART data
$SmartData = (Get-CimInstance -Namespace 'Root\WMI' -ClassName 'MSStorageDriver_ATAPISMartData' |
Where-Object 'InstanceName' -eq $disk)
[Byte[]]$RawSmartData = $SmartData | Select-Object -ExpandProperty 'VendorSpecific'
# Starting at the third number (first two are irrelevant)
# get the relevant data by iterating over every 12th number
# and saving the values from an offset of the SMART attribute ID
[PSCustomObject[]]$Output = for ($i = 2; $i -lt $RawSmartData.Count; $i++) {
if (0 -eq ($i - 2) % 12 -and $RawSmartData[$i] -ne 0) {
# Construct the raw attribute value by combining the two bytes that make it up
[Decimal]$RawValue = ($RawSmartData[$i + 6] * [Math]::Pow(2, 8) + $RawSmartData[$i + 5])
$InnerOutput = [PSCustomObject]@{
DiskID = $disk
ID = $RawSmartData[$i]
#Flags = $RawSmartData[$i + 1]
#Value = $RawSmartData[$i + 3]
Worst = $RawSmartData[$i + 4]
RawValue = $RawValue
}
$InnerOutput
}
}
# Per-attribute threshold checks; any match is collected into $Warnings.
# Reallocated Sectors Count
$Warnings += $Output | Where-Object ID -eq 5 | Where-Object RawValue -gt 1 | Format-Table
# Spin Retry Count
$Warnings += $Output | Where-Object ID -eq 10 | Where-Object RawValue -ne 0 | Format-Table
# Recalibration Retries
$Warnings += $Output | Where-Object ID -eq 11 | Where-Object RawValue -ne 0 | Format-Table
# Used Reserved Block Count Total
$Warnings += $Output | Where-Object ID -eq 179 | Where-Object RawValue -gt 1 | Format-Table
# Erase Failure Count
$Warnings += $Output | Where-Object ID -eq 182 | Where-Object RawValue -ne 0 | Format-Table
# SATA Downshift Error Count or Runtime Bad Block
$Warnings += $Output | Where-Object ID -eq 183 | Where-Object RawValue -ne 0 | Format-Table
# End-to-End error / IOEDC
$Warnings += $Output | Where-Object ID -eq 184 | Where-Object RawValue -ne 0 | Format-Table
# Reported Uncorrectable Errors
$Warnings += $Output | Where-Object ID -eq 187 | Where-Object RawValue -ne 0 | Format-Table
# Command Timeout
$Warnings += $Output | Where-Object ID -eq 188 | Where-Object RawValue -gt 2 | Format-Table
# High Fly Writes
$Warnings += $Output | Where-Object ID -eq 189 | Where-Object RawValue -ne 0 | Format-Table
# Temperature Celcius
$Warnings += $Output | Where-Object ID -eq 194 | Where-Object RawValue -gt 50 | Format-Table
# Reallocation Event Count
$Warnings += $Output | Where-Object ID -eq 196 | Where-Object RawValue -ne 0 | Format-Table
# Current Pending Sector Count
$Warnings += $Output | Where-Object ID -eq 197 | Where-Object RawValue -ne 0 | Format-Table
# Uncorrectable Sector Count
$Warnings += $Output | Where-Object ID -eq 198 | Where-Object RawValue -ne 0 | Format-Table
# UltraDMA CRC Error Count
$Warnings += $Output | Where-Object ID -eq 199 | Where-Object RawValue -ne 0 | Format-Table
# Soft Read Error Rate
$Warnings += $Output | Where-Object ID -eq 201 | Where-Object Worst -lt 95 | Format-Table
# SSD Life Left
$Warnings += $Output | Where-Object ID -eq 231 | Where-Object Worst -lt 50 | Format-Table
# SSD Media Wear Out Indicator
$Warnings += $Output | Where-Object ID -eq 233 | Where-Object Worst -lt 50 | Format-Table
}
# Driver-level prediction flag, plus basic status from Win32_DiskDrive and
# Get-PhysicalDisk as independent cross-checks.
$Warnings += Get-CimInstance -Namespace 'Root\WMI' -ClassName 'MSStorageDriver_FailurePredictStatus' |
Select-Object InstanceName, PredictFailure, Reason |
Where-Object {$_.PredictFailure -ne $False} | Format-Table
$Warnings += Get-CimInstance -ClassName 'Win32_DiskDrive' |
Select-Object Model, SerialNumber, Name, Size, Status |
Where-Object {$_.status -ne 'OK'} | Format-Table
$Warnings += Get-PhysicalDisk |
Select-Object FriendlyName, Size, MediaType, OperationalStatus, HealthStatus |
Where-Object {$_.OperationalStatus -ne 'OK' -or $_.HealthStatus -ne 'Healthy'} | Format-Table
if ($Warnings) {
$Warnings = $warnings | Out-String
$Warnings
Write-Output "There are SMART impending Failures"
Write-Output "$Warnings"
Exit 2
}
# NOTE(review): $Error is the session-wide error collection, so errors raised
# before this script's own queries would also trigger this branch — confirm
# whether $Error.Clear() at script start is wanted.
elseif ($Error) {
Write-Output "There were errors detecting smart on this system"
Write-Output "$Error"
exit 1
}
else
{
Write-Output "There are no SMART Failures detected"
exit 0
}
Exit $LASTEXITCODE

View File

@@ -0,0 +1,18 @@
# https://github.com/knk90
# Installs Angry IP scanner using choco, runs a scan of the network and then uninstalls it
choco.exe install angryip -y
# Force ASCII output for all cmdlets so the generated .bat is runnable by cmd.exe
$PSDefaultParameterValues['*:Encoding'] = 'ascii'
# One scan per local IPv4 address: scan that address's /24 (x.x.x.1-254)
$ips = get-netipaddress -AddressFamily ipv4 | select-object ipaddress
foreach ($i in $ips) {
$split = $i.ipaddress.Split(".")
$startrange = $split[0] + "." + $split[1] + "." + $split[2] + "." + "1"
$endrange = $split[0] + "." + $split[1] + "." + $split[2] + "." + "254"
# Build the ipscan command line; written to a .bat and executed from there
$command = "`"c:\Program Files\Angry IP Scanner\ipscan.exe`" -f:range " + $startrange + " " + $endrange + " -s -q -o c:\programdata\ipscanoutput.txt`""
# Skip the loopback range; print the results file when the scan finishes
if ($startrange -notlike "*127.0*") {
$command | Out-file -Encoding ASCII c:\programdata\ipscan.bat
c:\programdata\ipscan.bat
type c:\programdata\ipscanoutput.txt
}
}
choco.exe uninstall angryip -y

View File

@@ -5,6 +5,8 @@
# Win 8.1 x64 and Svr 2012 R2 x64 https://download.microsoft.com/download/6/F/5/6F5FF66C-6775-42B0-86C4-47D41F2DA187/Win8.1AndW2K12R2-KB3191564-x64.msu
# Win 81 x32 https://download.microsoft.com/download/6/F/5/6F5FF66C-6775-42B0-86C4-47D41F2DA187/Win8.1-KB3191564-x86.msu
# See https://github.com/wh1te909/tacticalrmm/blob/develop/scripts_wip/Win_Powershell_Version_Check.ps1 for alert script to warn when this is needed
if ($PSVersionTable.PSVersion.Major -lt 5) {
Write-Output "Old Version - Need to Upgrade"
# Download MSU file - EDIT THIS URL

View File

@@ -0,0 +1,12 @@
# Use as check script for old Powershell version 2.0 (aka Win7) and upgrade using https://github.com/wh1te909/tacticalrmm/blob/develop/scripts_wip/Win_Powershell_Upgrade.ps1
# Emits the major PowerShell version, then a status line.
# Exit 0 = version is modern enough; exit 1 = upgrade needed.
$major = $PSVersionTable.PSVersion.Major
$major
if ($major -le 2) {
    Write-Output "PSVersion less than 2.0"
    exit 1
}
Write-Output "PSVersion Greater than 2.0"
exit 0

View File

@@ -0,0 +1,22 @@
# Checking for insecure by design print features being enabled
# See https://support.microsoft.com/en-us/topic/kb5005010-restricting-installation-of-new-printer-drivers-after-applying-the-july-6-2021-updates-31b91c02-05bc-4ada-a7ea-183b129578a7
# Fix: per the KB, both values live under the "...\Printers\PointAndPrint" key.
# The previous path ("...\Printers\PointAndPrintNoElevation") does not exist, so
# both reads were always $null and the script always reported "No vulnerabilities".
$PointAndPrint = Get-ItemProperty -Path "HKLM:\Software\Policies\Microsoft\Windows NT\Printers\PointAndPrint" -ErrorAction SilentlyContinue
$PointAndPrintNoElevation = $PointAndPrint.NoWarningNoElevationOnInstall
$PointAndPrintUpdatePrompt = $PointAndPrint.UpdatePromptSettings
if ($PointAndPrintNoElevation -Eq 1) {
    Write-Output "Point and Print WarningNoElevationOnInstall set to true. WARNING: You are insecure-by-design."
    exit 1
}
elseif ($PointAndPrintUpdatePrompt -Eq 1) {
    Write-Output "Point and Print PointAndPrintUpdatePrompt set to true. WARNING: You are insecure-by-design."
    exit 1
}
else {
    Write-Output "WarningNoElevationOnInstall UpdatePromptSettings set to false. No vulnerabilities"
    exit 0
}

View File

@@ -0,0 +1,33 @@
# Downloads the Ookla speedtest CLI (once) into C:\TechTools and runs it.
$runpath = "C:\TechTools\Speedtest\Speedtest.exe"
$zippath = "C:\TechTools\Zip\"
$toolpath = "C:\TechTools\Speedtest\"
$Url = "https://install.speedtest.net/app/cli/ookla-speedtest-1.0.0-win64.zip"
$DownloadZipFile = "C:\TechTools\Zip\" + $(Split-Path -Path $Url -Leaf)
$ExtractPath = "C:\TechTools\Speedtest\"
#Check for speedtest cli executable, if missing it will check for and create folders required,
#download speedtest cli zip file from $URL and extract into correct folder
IF(!(test-path $runpath))
{
#Check for SpeedTest folder, if missing, create
If(!(test-path $toolpath))
{
New-Item -ItemType Directory -Force -Path $toolpath
}
#Check for zip folder, if missing, create
If(!(test-path $zippath))
{
New-Item -ItemType Directory -Force -Path $zippath
}
#Download and extract zip from the URL in $URL
# Uses the Shell.Application COM object (rather than Expand-Archive) so this
# also works on hosts without PowerShell 5+.
Invoke-WebRequest -Uri $Url -OutFile $DownloadZipFile
$ExtractShell = New-Object -ComObject Shell.Application
$ExtractFiles = $ExtractShell.Namespace($DownloadZipFile).Items()
$ExtractShell.NameSpace($ExtractPath).CopyHere($ExtractFiles)
}
# Run the speedtest and emit its output
& $runpath

View File

@@ -0,0 +1,65 @@
Import-Module $env:SyncroModule
# Random 1-100 second delay staggers simultaneous runs across many agents
$Random = get-random -min 1 -max 100
start-sleep $random
######### Absolute monitoring values ##########
$maxpacketloss = 2 #how much % packetloss until we alert.
$MinimumDownloadSpeed = 10 #What is the minimum expected download speed in Mbit/ps
$MinimumUploadSpeed = 1 #What is the minimum expected upload speed in Mbit/ps
######### End absolute monitoring values ######
#Replace the Download URL to where you've uploaded the ZIP file yourself. We will only download this file once.
#Latest version can be found at: https://www.speedtest.net/nl/apps/cli
$DownloadURL = "https://bintray.com/ookla/download/download_file?file_path=ookla-speedtest-1.0.0-win64.zip"
$DownloadLocation = "$($Env:ProgramData)\SpeedtestCLI"
# Download and extract the CLI only on first run (folder absent)
try {
$TestDownloadLocation = Test-Path $DownloadLocation
if (!$TestDownloadLocation) {
new-item $DownloadLocation -ItemType Directory -force
Invoke-WebRequest -Uri $DownloadURL -OutFile "$($DownloadLocation)\speedtest.zip"
Expand-Archive "$($DownloadLocation)\speedtest.zip" -DestinationPath $DownloadLocation -Force
}
}
catch {
write-host "The download and extraction of SpeedtestCLI failed. Error: $($_.Exception.Message)"
exit 1
}
# Previous run's raw JSON (if any) is used for the 20% drift comparison below
$PreviousResults = if (test-path "$($DownloadLocation)\LastResults.txt") { get-content "$($DownloadLocation)\LastResults.txt" | ConvertFrom-Json }
$SpeedtestResults = & "$($DownloadLocation)\speedtest.exe" --format=json --accept-license --accept-gdpr
$SpeedtestResults | Out-File "$($DownloadLocation)\LastResults.txt" -Force
$SpeedtestResults = $SpeedtestResults | ConvertFrom-Json
#creating object
# bandwidth is reported in bytes/sec; /1000000*8 converts to Mbit/s
[PSCustomObject]$SpeedtestObj = @{
downloadspeed = [math]::Round($SpeedtestResults.download.bandwidth / 1000000 * 8, 2)
uploadspeed = [math]::Round($SpeedtestResults.upload.bandwidth / 1000000 * 8, 2)
packetloss = [math]::Round($SpeedtestResults.packetLoss)
isp = $SpeedtestResults.isp
ExternalIP = $SpeedtestResults.interface.externalIp
InternalIP = $SpeedtestResults.interface.internalIp
UsedServer = $SpeedtestResults.server.host
ResultsURL = $SpeedtestResults.result.url
Jitter = [math]::Round($SpeedtestResults.ping.jitter)
Latency = [math]::Round($SpeedtestResults.ping.latency)
}
$SpeedtestHealth = @()
#Comparing against previous result. Alerting is download or upload differs more than 20%.
if ($PreviousResults) {
if ($PreviousResults.download.bandwidth / $SpeedtestResults.download.bandwidth * 100 -le 80) { $SpeedtestHealth += "Download speed difference is more than 20%" }
if ($PreviousResults.upload.bandwidth / $SpeedtestResults.upload.bandwidth * 100 -le 80) { $SpeedtestHealth += "Upload speed difference is more than 20%" }
}
#Comparing against preset variables.
# ($MaxPacketLoss resolves to $maxpacketloss above — PS variables are case-insensitive)
if ($SpeedtestObj.downloadspeed -lt $MinimumDownloadSpeed) { $SpeedtestHealth += "Download speed is lower than $MinimumDownloadSpeed Mbit/ps" }
if ($SpeedtestObj.uploadspeed -lt $MinimumUploadSpeed) { $SpeedtestHealth += "Upload speed is lower than $MinimumUploadSpeed Mbit/ps" }
if ($SpeedtestObj.packetloss -gt $MaxPacketLoss) { $SpeedtestHealth += "Packetloss is higher than $maxpacketloss%" }
if (!$SpeedtestHealth) {
$SpeedtestHealth = "Healthy"
}
# Push results into Syncro asset custom fields
Set-Asset-Field -Subdomain "fresh-tech" -Name "Download Speed" -Value $SpeedtestObj.downloadspeed
Set-Asset-Field -Subdomain "fresh-tech" -Name "Upload Speed" -Value $SpeedtestObj.uploadspeed
Set-Asset-Field -Subdomain "fresh-tech" -Name "Packet Loss" -Value $SpeedtestObj.packetloss
Set-Asset-Field -Subdomain "fresh-tech" -Name "Speedtest Health" -Value $SpeedtestHealth

32
web/package-lock.json generated
View File

@@ -8,9 +8,9 @@
"name": "web",
"version": "0.1.8",
"dependencies": {
"@quasar/extras": "^1.10.11",
"@quasar/extras": "^1.10.12",
"apexcharts": "^3.27.1",
"axios": "^0.21.1",
"axios": "^0.21.3",
"dotenv": "^8.6.0",
"prismjs": "^1.23.0",
"qrcode.vue": "^3.2.2",
@@ -2148,9 +2148,9 @@
}
},
"node_modules/@quasar/extras": {
"version": "1.10.11",
"resolved": "https://registry.npmjs.org/@quasar/extras/-/extras-1.10.11.tgz",
"integrity": "sha512-/zJiT8iExl0j2k1zA21Eho8SPMtG5ehcYayszunrq/z7zDp728oWSteI9AfQFnF8/+M06f5HUzy+Vssf6IKH/g==",
"version": "1.10.12",
"resolved": "https://registry.npmjs.org/@quasar/extras/-/extras-1.10.12.tgz",
"integrity": "sha512-CVSxLw/Z6kaEYrooJX7mpby6YDm0eSa8D9/1+KEfiTYfLrPE4wTRuNGKN5liuLtVhFMdGrEkj6T6DInKpQWW9A==",
"funding": {
"type": "github",
"url": "https://donate.quasar.dev"
@@ -3209,11 +3209,11 @@
}
},
"node_modules/axios": {
"version": "0.21.1",
"resolved": "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz",
"integrity": "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==",
"version": "0.21.3",
"resolved": "https://registry.npmjs.org/axios/-/axios-0.21.3.tgz",
"integrity": "sha512-JtoZ3Ndke/+Iwt5n+BgSli/3idTvpt5OjKyoCmz4LX5+lPiY5l7C1colYezhlxThjNa/NhngCUWZSZFypIFuaA==",
"dependencies": {
"follow-redirects": "^1.10.0"
"follow-redirects": "^1.14.0"
}
},
"node_modules/babel-loader": {
@@ -14926,9 +14926,9 @@
}
},
"@quasar/extras": {
"version": "1.10.11",
"resolved": "https://registry.npmjs.org/@quasar/extras/-/extras-1.10.11.tgz",
"integrity": "sha512-/zJiT8iExl0j2k1zA21Eho8SPMtG5ehcYayszunrq/z7zDp728oWSteI9AfQFnF8/+M06f5HUzy+Vssf6IKH/g=="
"version": "1.10.12",
"resolved": "https://registry.npmjs.org/@quasar/extras/-/extras-1.10.12.tgz",
"integrity": "sha512-CVSxLw/Z6kaEYrooJX7mpby6YDm0eSa8D9/1+KEfiTYfLrPE4wTRuNGKN5liuLtVhFMdGrEkj6T6DInKpQWW9A=="
},
"@quasar/fastclick": {
"version": "1.1.4",
@@ -15858,11 +15858,11 @@
}
},
"axios": {
"version": "0.21.1",
"resolved": "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz",
"integrity": "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==",
"version": "0.21.3",
"resolved": "https://registry.npmjs.org/axios/-/axios-0.21.3.tgz",
"integrity": "sha512-JtoZ3Ndke/+Iwt5n+BgSli/3idTvpt5OjKyoCmz4LX5+lPiY5l7C1colYezhlxThjNa/NhngCUWZSZFypIFuaA==",
"requires": {
"follow-redirects": "^1.10.0"
"follow-redirects": "^1.14.0"
}
},
"babel-loader": {

View File

@@ -10,9 +10,9 @@
"test:e2e:ci": "cross-env E2E_TEST=true start-test \"quasar dev\" http-get://localhost:8080 \"cypress run\""
},
"dependencies": {
"@quasar/extras": "^1.10.11",
"@quasar/extras": "^1.10.12",
"apexcharts": "^3.27.1",
"axios": "^0.21.1",
"axios": "^0.21.3",
"dotenv": "^8.6.0",
"prismjs": "^1.23.0",
"qrcode.vue": "^3.2.2",

View File

@@ -2,9 +2,34 @@ import axios from "axios"
const baseUrl = "/accounts"
// user api functions
export async function fetchUsers(params = {}) {
try {
const { data } = await axios.get(`${baseUrl}/users/`, { params: params })
return data
} catch (e) { }
}
}
// api key api functions
export async function fetchAPIKeys(params = {}) {
  // GET /accounts/apikeys/ — resolves with the key list, or undefined on failure
  // (errors are deliberately swallowed; callers treat undefined as "no data").
  try {
    const response = await axios.get(`${baseUrl}/apikeys/`, { params });
    return response.data;
  } catch (e) { }
}
export async function saveAPIKey(payload) {
  // POST a new API key; resolves with the server response body, rejects on error.
  const response = await axios.post(`${baseUrl}/apikeys/`, payload);
  return response.data;
}
export async function editAPIKey(payload) {
  // PUT the full key object to /accounts/apikeys/<id>/; resolves with the response body.
  const url = `${baseUrl}/apikeys/${payload.id}/`;
  const response = await axios.put(url, payload);
  return response.data;
}
export async function removeAPIKey(id) {
  // DELETE /accounts/apikeys/<id>/; resolves with the response body, rejects on error.
  const response = await axios.delete(`${baseUrl}/apikeys/${id}/`);
  return response.data;
}

View File

@@ -479,7 +479,8 @@ export default {
if (filter === "actionspending") actions = true;
else if (filter === "checksfailing") checks = true;
else if (filter === "rebootneeded") reboot = true;
else if (filter === "online" || filter === "offline" || filter === "expired") availability = filter;
else if (filter === "online" || filter === "offline" || filter === "expired" || filter === "overdue")
availability = filter;
} else {
search = param + "";
}
@@ -492,7 +493,8 @@ export default {
if (actions && row.pending_actions_count === 0) return false;
if (reboot && !row.needs_reboot) return false;
if (availability === "online" && row.status !== "online") return false;
else if (availability === "offline" && row.status !== "overdue") return false;
else if (availability === "offline" && row.status !== "offline") return false;
else if (availability === "overdue" && row.status !== "overdue") return false;
else if (availability === "expired") {
let now = new Date();
let lastSeen = date.extractDate(row.last_seen, "MM DD YYYY HH:mm");

View File

@@ -43,7 +43,7 @@
</span>
<span v-else-if="props.row.type === 'cmd_run'"
>{{ truncateText(props.row.results, 30) }}
<q-tooltip v-if="props.row.results.length >= 30" style="font-size: 12px">
<q-tooltip v-if="props.row.results !== null && props.row.results.length >= 30" style="font-size: 12px">
{{ props.row.results }}
</q-tooltip>
</span>
@@ -83,14 +83,14 @@ const columns = [
sortable: true,
format: (val, row) => formatTableColumnText(val),
},
{
/* {
name: "status",
label: "Status",
field: "status",
align: "left",
sortable: true,
format: (val, row) => formatTableColumnText(val),
},
}, */
{ name: "command", label: "Script/Command", field: "command", align: "left", sortable: true },
{ name: "username", label: "Initiated By", field: "username", align: "left", sortable: true },
{ name: "output", label: "Output", field: "output", align: "left", sortable: true },

View File

@@ -0,0 +1,122 @@
<template>
  <!-- Dialog for creating or editing an API key; emits "ok" after a successful save -->
  <q-dialog ref="dialogRef" @hide="onDialogHide">
    <q-card class="q-dialog-plugin" style="width: 60vw">
      <q-bar>
        {{ title }}
        <q-space />
        <q-btn dense flat icon="close" v-close-popup>
          <q-tooltip class="bg-white text-primary">Close</q-tooltip>
        </q-btn>
      </q-bar>
      <q-form @submit.prevent="submitForm">
        <q-card-section>
          <span v-if="!APIKey">API Key will be generated on save</span>
        </q-card-section>
        <!-- name -->
        <q-card-section>
          <q-input label="Name" outlined dense v-model="localKey.name" :rules="[val => !!val || '*Required']" />
        </q-card-section>
        <!-- user -->
        <q-card-section>
          <tactical-dropdown outlined v-model="localKey.user" label="User" :options="userOptions" mapOptions />
        </q-card-section>
        <!-- key (read-only; only shown when editing an existing key) -->
        <q-card-section v-if="APIKey">
          <q-input readonly label="Key" outlined dense v-model="localKey.key" />
        </q-card-section>
        <!-- expiration (optional; date and time pickers share the same model) -->
        <q-card-section>
          <q-input dense label="Key Expiration (Not required) " filled v-model="localKey.expiration">
            <template v-slot:append>
              <q-icon name="event" class="cursor-pointer">
                <q-popup-proxy transition-show="scale" transition-hide="scale">
                  <q-date v-model="localKey.expiration" mask="YYYY-MM-DD HH:mm">
                    <div class="row items-center justify-end">
                      <q-btn v-close-popup label="Close" color="primary" flat />
                    </div>
                  </q-date>
                </q-popup-proxy>
              </q-icon>
              <q-icon name="access_time" class="cursor-pointer">
                <q-popup-proxy transition-show="scale" transition-hide="scale">
                  <q-time v-model="localKey.expiration" mask="YYYY-MM-DD HH:mm">
                    <div class="row items-center justify-end">
                      <q-btn v-close-popup label="Close" color="primary" flat />
                    </div>
                  </q-time>
                </q-popup-proxy>
              </q-icon>
            </template>
          </q-input>
        </q-card-section>
        <q-card-actions align="right">
          <q-btn flat label="Cancel" v-close-popup />
          <q-btn flat label="Submit" color="primary" type="submit" :loading="loading" />
        </q-card-actions>
      </q-form>
    </q-card>
  </q-dialog>
</template>
<script>
// composition imports
import { ref, computed } from "vue";
import { useDialogPluginComponent } from "quasar";
import { saveAPIKey, editAPIKey } from "@/api/accounts";
import { useUserDropdown } from "@/composables/accounts";
import { notifySuccess } from "@/utils/notify";
import TacticalDropdown from "@/components/ui/TacticalDropdown.vue";
export default {
  components: { TacticalDropdown },
  name: "APIKeysForm",
  emits: [...useDialogPluginComponent.emits],
  // APIKey: existing key object when editing; undefined when adding a new key
  props: { APIKey: Object },
  setup(props) {
    // setup quasar plugins
    const { dialogRef, onDialogHide, onDialogOK } = useDialogPluginComponent();
    // setup dropdowns
    const { userOptions } = useUserDropdown(true);
    // setup api key form logic
    // Shallow copy when editing so edits don't mutate the caller's object.
    const key = props.APIKey ? ref(Object.assign({}, props.APIKey)) : ref({ name: "", expiration: null });
    const loading = ref(false);
    const title = computed(() => (props.APIKey ? "Edit API Key" : "Add API Key"));
    // Create or update the key, then close the dialog and show the API's message.
    async function submitForm() {
      loading.value = true;
      try {
        const result = props.APIKey ? await editAPIKey(key.value) : await saveAPIKey(key.value);
        onDialogOK();
        notifySuccess(result);
        loading.value = false;
      } catch (e) {
        loading.value = false;
      }
    }
    return {
      // reactive data
      localKey: key,
      loading,
      userOptions,
      // computed
      title,
      // methods
      submitForm,
      // quasar dialog
      dialogRef,
      onDialogHide,
      onDialogOK,
    };
  },
};
</script>

View File

@@ -0,0 +1,212 @@
<template>
  <div>
    <div class="row">
      <div class="text-subtitle2">API Keys</div>
      <q-space />
      <q-btn size="sm" color="grey-5" icon="fas fa-plus" text-color="black" label="Add key" @click="addAPIKey" />
    </div>
    <hr />
    <q-table
      dense
      :rows="keys"
      :columns="columns"
      :loading="loading"
      v-model:pagination="pagination"
      row-key="id"
      binary-state-sort
      hide-pagination
      virtual-scroll
      :rows-per-page-options="[0]"
      no-data-label="No API tokens added yet"
    >
      <!-- header slots -->
      <template v-slot:header-cell-actions="props">
        <q-th :props="props" auto-width> </q-th>
      </template>
      <!-- body slots -->
      <template v-slot:body="props">
        <q-tr :props="props" class="cursor-pointer" @dblclick="editAPIKey(props.row)">
          <!-- context menu -->
          <q-menu context-menu>
            <q-list dense style="min-width: 200px">
              <q-item clickable v-close-popup @click="editAPIKey(props.row)">
                <q-item-section side>
                  <q-icon name="edit" />
                </q-item-section>
                <q-item-section>Edit</q-item-section>
              </q-item>
              <q-item clickable v-close-popup @click="deleteAPIKey(props.row)">
                <q-item-section side>
                  <q-icon name="delete" />
                </q-item-section>
                <q-item-section>Delete</q-item-section>
              </q-item>
              <q-separator></q-separator>
              <q-item clickable v-close-popup>
                <q-item-section>Close</q-item-section>
              </q-item>
            </q-list>
          </q-menu>
          <!-- name -->
          <q-td>
            {{ props.row.name }}
          </q-td>
          <!-- user -->
          <q-td>
            {{ props.row.username }}
          </q-td>
          <!-- expiration -->
          <q-td>
            {{ props.row.expiration }}
          </q-td>
          <!-- created time -->
          <q-td>
            {{ props.row.created_time }}
          </q-td>
          <!-- copy-to-clipboard action -->
          <q-td>
            <q-icon size="sm" name="content_copy" @click="copyKeyToClipboard(props.row.key)">
              <q-tooltip>Copy API Key to clipboard</q-tooltip>
            </q-icon>
          </q-td>
        </q-tr>
      </template>
    </q-table>
  </div>
</template>
<script>
// composition imports
import { ref, onMounted } from "vue";
import { fetchAPIKeys, removeAPIKey } from "@/api/accounts";
import { useQuasar, copyToClipboard } from "quasar";
import { notifySuccess, notifyError } from "@/utils/notify";
import APIKeysForm from "@/components/core/APIKeysForm";

// static column definitions (non-reactive)
const columns = [
  {
    name: "name",
    label: "Name",
    field: "name",
    align: "left",
    sortable: true,
  },
  {
    name: "username",
    label: "User",
    field: "username",
    align: "left",
    sortable: true,
  },
  {
    name: "expiration",
    label: "Expiration",
    field: "expiration",
    align: "left",
    sortable: true,
  },
  {
    name: "created_time",
    label: "Created",
    field: "created_time",
    align: "left",
    sortable: true,
  },
  {
    name: "actions",
    label: "",
    field: "actions",
  },
];

/**
 * Table of API keys with add/edit/delete and copy-to-clipboard actions.
 * Loads the key list on mount and refreshes after every mutation.
 */
export default {
  name: "APIKeysTable",
  setup() {
    // setup quasar
    const $q = useQuasar();
    // setup api keys logic
    const keys = ref([]);
    const loading = ref(false);
    // setup table
    const pagination = ref({
      rowsPerPage: 0,
      sortBy: "name",
      descending: true,
    });

    // Copy the raw key string to the clipboard and notify the outcome.
    function copyKeyToClipboard(apikey) {
      copyToClipboard(apikey)
        .then(() => {
          notifySuccess("Key was copied to clipboard!");
        })
        .catch(() => {
          notifyError("Unable to copy to clipboard!");
        });
    }

    // api functions
    async function getAPIKeys() {
      loading.value = true;
      try {
        keys.value = await fetchAPIKeys();
      } finally {
        loading.value = false;
      }
    }

    // Confirm, then delete the given key and refresh the table.
    async function deleteAPIKey(key) {
      $q.dialog({
        title: `Delete API key: ${key.name}?`,
        cancel: true,
        ok: { label: "Delete", color: "negative" },
      }).onOk(async () => {
        loading.value = true;
        try {
          const result = await removeAPIKey(key.id);
          notifySuccess(result);
          getAPIKeys();
        } catch (e) {
          console.error(e);
        } finally {
          loading.value = false;
        }
      });
    }

    // quasar dialog functions
    function editAPIKey(key) {
      $q.dialog({
        component: APIKeysForm,
        componentProps: {
          APIKey: key,
        },
      }).onOk(() => getAPIKeys());
    }
    function addAPIKey() {
      $q.dialog({
        component: APIKeysForm,
      }).onOk(() => getAPIKeys());
    }

    // component lifecycle hooks
    // pass the function reference; onMounted(getAPIKeys()) would invoke it
    // immediately and register its Promise as the hook instead
    onMounted(getAPIKeys);

    return {
      // reactive data
      keys,
      loading,
      pagination,
      // non-reactive data
      columns,
      //methods
      getAPIKeys,
      deleteAPIKey,
      copyKeyToClipboard,
      //dialogs
      editAPIKey,
      addAPIKey,
    };
  },
};
</script>

View File

@@ -155,6 +155,7 @@ const agentActionOptions = [
{ value: "execute_command", label: "Execute Command" },
{ value: "execute_script", label: "Execute Script" },
{ value: "remote_session", label: "Remote Session" },
{ value: "url_action", label: "URL Action" },
];
const actionOptions = [
@@ -181,6 +182,9 @@ const objectOptions = [
{ value: "winupdatepolicy", label: "Patch Policy" },
{ value: "alerttemplate", label: "Alert Template" },
{ value: "role", label: "Role" },
{ value: "urlaction", label: "URL Action" },
{ value: "keystore", label: "Global Key Store" },
{ value: "customfield", label: "Custom Field" },
];
const timeOptions = [

View File

@@ -56,6 +56,7 @@
<q-checkbox v-model="role.can_edit_core_settings" label="Edit Global Settings" />
<q-checkbox v-model="role.can_do_server_maint" label="Do Server Maintenance" />
<q-checkbox v-model="role.can_code_sign" label="Manage Code Signing" />
<q-checkbox v-model="role.can_manage_api_keys" label="Manage API Keys" />
</div>
</q-card-section>
@@ -180,6 +181,7 @@ export default {
can_manage_notes: false,
can_view_core_settings: false,
can_edit_core_settings: false,
can_manage_api_keys: false,
can_do_server_maint: false,
can_code_sign: false,
can_manage_checks: false,

View File

@@ -68,7 +68,7 @@
<q-card-section class="row">
<div class="col-2">Active:</div>
<div class="col-10">
<q-toggle v-model="localUser.is_active" color="green" :disable="localUser.username === logged_in_user" />
<q-checkbox v-model="localUser.is_active" :disable="localUser.username === logged_in_user" />
</div>
</q-card-section>
<q-card-section class="row">
@@ -88,6 +88,14 @@
class="col-10"
/></template>
</q-card-section>
<q-card-section>
<q-checkbox
label="Deny Dashboard Logins"
left-label
v-model="localUser.block_dashboard_login"
:disable="localUser.username === logged_in_user"
/>
</q-card-section>
<q-card-section class="row items-center">
<q-btn :disable="!disableSave" label="Save" color="primary" type="submit" />
</q-card-section>
@@ -109,6 +117,7 @@ export default {
return {
localUser: {
is_active: true,
block_dashboard_login: false,
},
roles: [],
isPwd: true,
@@ -146,6 +155,7 @@ export default {
// dont allow updating is_active if username is same as logged in user
if (this.localUser.username === this.logged_in_user) {
delete this.localUser.is_active;
delete this.localUser.deny_dashboard_login;
}
this.$axios

View File

@@ -11,6 +11,7 @@
<q-tab name="keystore" label="Key Store" />
<q-tab name="urlactions" label="URL Actions" />
<q-tab name="retention" label="Retention" />
<q-tab name="apikeys" label="API Keys" />
</q-tabs>
</template>
<template v-slot:after>
@@ -384,6 +385,10 @@
/>
</q-card-section>
</q-tab-panel>
<q-tab-panel name="apikeys">
<APIKeysTable />
</q-tab-panel>
</q-tab-panels>
</q-scroll-area>
<q-card-section class="row items-center">
@@ -422,6 +427,7 @@ import ResetPatchPolicy from "@/components/modals/coresettings/ResetPatchPolicy"
import CustomFields from "@/components/modals/coresettings/CustomFields";
import KeyStoreTable from "@/components/modals/coresettings/KeyStoreTable";
import URLActionsTable from "@/components/modals/coresettings/URLActionsTable";
import APIKeysTable from "@/components/core/APIKeysTable";
export default {
name: "EditCoreSettings",
@@ -430,6 +436,7 @@ export default {
CustomFields,
KeyStoreTable,
URLActionsTable,
APIKeysTable,
},
mixins: [mixins],
data() {

View File

@@ -150,7 +150,7 @@ export default {
// script form logic
const script = props.script
? ref(Object.assign({}, props.script))
: ref({ shell: "powershell", default_timeout: 90 });
: ref({ shell: "powershell", default_timeout: 90, args: [] });
if (props.clone) script.value.name = `(Copy) ${script.value.name}`;
const code = ref("");

View File

@@ -1,9 +1,9 @@
import { ref } from "vue"
import { ref, onMounted } from "vue"
import { fetchUsers } from "@/api/accounts"
import { formatUserOptions } from "@/utils/format"
export function useUserDropdown() {
export function useUserDropdown(onMount = false) {
const userOptions = ref([])
const userDropdownLoading = ref(false)
@@ -32,6 +32,10 @@ export function useUserDropdown() {
})
}
if (onMount) {
onMounted(getUserOptions())
}
return {
//data
userOptions,

View File

@@ -337,6 +337,16 @@
</q-item-section>
</q-item>
<q-item>
<q-item-section side>
<q-radio val="overdue" v-model="filterAvailability" />
</q-item-section>
<q-item-section>
<q-item-label>Show Overdue Only</q-item-label>
</q-item-section>
</q-item>
<q-item>
<q-item-section side>
<q-radio val="offline_30days" v-model="filterAvailability" />
@@ -809,7 +819,10 @@ export default {
// clear search if availability changes to all
if (
this.filterAvailability === "all" &&
(this.search.includes("is:online") || this.search.includes("is:offline") || this.search.includes("is:expired"))
(this.search.includes("is:online") ||
this.search.includes("is:offline") ||
this.search.includes("is:expired") ||
this.search.includes("is:overdue"))
)
this.clearFilter();
@@ -841,6 +854,8 @@ export default {
filterText += "is:offline ";
} else if (this.filterAvailability === "offline_30days") {
filterText += "is:expired ";
} else if (this.filterAvailability === "overdue") {
filterText += "is:overdue ";
}
}