Compare commits

98 Commits:
e76fa878d2, 376b421eb9, e1643aca80, 4e97c0c5c9, 05b88a3c73, 3c087d49e9, d81fcccf10, ee3a7bbbfc,
82d9e2fb16, 6ab39d6f70, 4aa413e697, 04b3fc54b0, e4c5a4e886, a0ee7a59eb, b4a05160df, 1a437b3961,
bda8555190, 10ca38f91d, a468faad20, 7a20be4aff, 06b974c8a4, 7284d9fcd8, 515394049a, 35c8b4f535,
1a325a66b4, 7d82116fb9, 8a7bd4f21b, 2e5a2ef12d, 89aceda65a, 39fd83aa16, a23d811fe8, a238779724,
3a848bc037, 0528ecb454, 141835593c, 3d06200368, 729bef9a77, 94f33bd642, 7e010cdbca, 8887bcd941,
56aeeee04c, 98eb3c7287, 6819c1989b, 7e01dd3e97, ea4f2c3de8, b2f63b8761, 65865101ce, c3637afe69,
ab543ddf0c, 80595e76e7, d49e68737a, 712e15ba80, 986160e667, 1ae4e23db1, bad646141c, 7911235b68,
12dee4d14d, cba841beb8, 4e3ebf7078, 1c34969f64, dc26cabacd, a7bffcd471, 6ae56ac2cc, 03c087020c,
857a1ab9c4, 64d9530e13, 5dac1efc30, 18bc74bc96, f64efc63f8, e84b897991, 519647ef93, f694fe00e4,
0b951f27b6, 8aa082c9df, f2c5d47bd8, ac7642cc15, 8f34865dab, c762d12a40, fe1e71dc07, 85b0350ed4,
a980491455, 5798c0ccaa, 742f49ca1f, 5560fc805b, 9d4f8a4e8c, b4d25d6285, a504a376bd, f61ea6e90a,
b2651df36f, b56c086841, 4ac1030289, 93c7117319, 974afd92ce, eafd38d3f2, c4e590e7a0, d754f3dd4c,
f54fc9e990, 1bf8ff73f8
@@ -1,11 +1,11 @@
# pulls community scripts from git repo
FROM python:3.11.6-slim AS GET_SCRIPTS_STAGE
FROM python:3.11.8-slim AS GET_SCRIPTS_STAGE

RUN apt-get update && \
    apt-get install -y --no-install-recommends git && \
    git clone https://github.com/amidaware/community-scripts.git /community-scripts

FROM python:3.11.6-slim
FROM python:3.11.8-slim

ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
3  .github/ISSUE_TEMPLATE/bug_report.md  (vendored)
@@ -14,11 +14,12 @@ assignees: ''

**Installation Method:**
- [ ] Standard
- [ ] Standard with `--insecure` flag at install
- [ ] Docker

**Agent Info (please complete the following information):**
- Agent version (as shown in the 'Summary' tab of the agent from web UI):
- Agent OS: [e.g. Win 10 v2004, Server 2012 R2]
- Agent OS: [e.g. Win 10 v2004, Server 2016]

**Describe the bug**
A clear and concise description of what the bug is.
2  .github/workflows/ci-tests.yml  (vendored)
@@ -14,7 +14,7 @@ jobs:
    name: Tests
    strategy:
      matrix:
        python-version: ["3.11.6"]
        python-version: ["3.11.8"]

    steps:
      - uses: actions/checkout@v4
20  .vscode/settings.json  (vendored)
@@ -8,24 +8,6 @@
    "reportGeneralTypeIssues": "none"
  },
  "python.analysis.typeCheckingMode": "basic",
  "python.linting.enabled": true,
  "python.linting.mypyEnabled": true,
  "python.linting.mypyArgs": [
    "--ignore-missing-imports",
    "--follow-imports=silent",
    "--show-column-numbers",
    "--strict"
  ],
  "python.linting.ignorePatterns": [
    "**/site-packages/**/*.py",
    ".vscode/*.py",
    "**env/**"
  ],
  "python.formatting.provider": "none",
  //"mypy.targets": [
  //"api/tacticalrmm"
  //],
  //"mypy.runUsingActiveInterpreter": true,
  "editor.bracketPairColorization.enabled": true,
  "editor.guides.bracketPairs": true,
  "editor.formatOnSave": true,
@@ -34,7 +16,6 @@
    "**/docker/**/docker-compose*.yml": "dockercompose"
  },
  "files.watcherExclude": {
  "files.watcherExclude": {
    "**/.git/objects/**": true,
    "**/.git/subtree-cache/**": true,
    "**/node_modules/": true,
@@ -53,7 +34,6 @@
    "**/*.parquet*": true,
    "**/*.pyc": true,
    "**/*.zip": true
  }
  },
  "go.useLanguageServer": true,
  "[go]": {
@@ -19,7 +19,7 @@ Demo database resets every hour. A lot of features are disabled for obvious reasons
- Teamviewer-like remote desktop control
- Real-time remote shell
- Remote file browser (download and upload files)
- Remote command and script execution (batch, powershell and python scripts)
- Remote command and script execution (batch, powershell, python, nushell and deno scripts)
- Event log viewer
- Services management
- Windows patch management
@@ -1,6 +1,6 @@
---
user: "tactical"
python_ver: "3.11.6"
python_ver: "3.11.8"
go_ver: "1.20.7"
backend_repo: "https://github.com/amidaware/tacticalrmm.git"
frontend_repo: "https://github.com/amidaware/tacticalrmm-web.git"
@@ -13,7 +13,6 @@ DATABASES = {
        'PORT': '5432',
    }
}
REDIS_HOST = "localhost"
ADMIN_ENABLED = True
CERT_FILE = "{{ fullchain_dest }}"
KEY_FILE = "{{ privkey_dest }}"
@@ -64,6 +64,15 @@ class User(AbstractUser, BaseAuditModel):
        on_delete=models.SET_NULL,
    )

    @property
    def mesh_user_id(self):
        return f"user//{self.mesh_username}"

    @property
    def mesh_username(self):
        # lower() needed for mesh api
        return f"{self.username.replace(' ', '').lower()}___{self.pk}"

    @staticmethod
    def serialize(user):
        # serializes the task and returns json
@@ -195,7 +204,7 @@ class Role(BaseAuditModel):
    def save(self, *args, **kwargs) -> None:
        # delete cache on save
        cache.delete(f"{ROLE_CACHE_PREFIX}{self.name}")
        super(BaseAuditModel, self).save(*args, **kwargs)
        super().save(*args, **kwargs)

    @staticmethod
    def serialize(role):
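For reference, a minimal sketch of what the two new identity properties produce. The username and primary key below are made-up example values; the expressions are the ones shown in the hunk above.

```python
# Illustration of User.mesh_username / User.mesh_user_id ("Jane Doe", pk=7 are hypothetical).
username = "Jane Doe"
pk = 7

mesh_username = f"{username.replace(' ', '').lower()}___{pk}"
mesh_user_id = f"user//{mesh_username}"

print(mesh_username)  # janedoe___7
print(mesh_user_id)   # user//janedoe___7
```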
@@ -1,8 +1,10 @@
from typing import TYPE_CHECKING

from django.conf import settings

if TYPE_CHECKING:
    from django.http import HttpRequest

    from accounts.models import User

@@ -16,3 +18,7 @@ def is_root_user(*, request: "HttpRequest", user: "User") -> bool:
        getattr(settings, "DEMO", False) and request.user.username == settings.ROOT_USER
    )
    return root or demo


def is_superuser(user: "User") -> bool:
    return user.role and getattr(user.role, "is_superuser")
@@ -11,6 +11,7 @@ from rest_framework.response import Response
from rest_framework.views import APIView

from accounts.utils import is_root_user
from core.tasks import sync_mesh_perms_task
from logs.models import AuditLog
from tacticalrmm.helpers import notify_error

@@ -133,6 +134,7 @@ class GetAddUsers(APIView):
            user.role = role

        user.save()
        sync_mesh_perms_task.delay()
        return Response(user.username)

@@ -153,6 +155,7 @@ class GetUpdateDeleteUser(APIView):
        serializer = UserSerializer(instance=user, data=request.data, partial=True)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        sync_mesh_perms_task.delay()

        return Response("ok")

@@ -162,7 +165,7 @@
            return notify_error("The root user cannot be deleted from the UI")

        user.delete()

        sync_mesh_perms_task.delay()
        return Response("ok")

@@ -243,11 +246,13 @@ class GetUpdateDeleteRole(APIView):
        serializer = RoleSerializer(instance=role, data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        sync_mesh_perms_task.delay()
        return Response("Role was edited")

    def delete(self, request, pk):
        role = get_object_or_404(Role, pk=pk)
        role.delete()
        sync_mesh_perms_task.delay()
        return Response("Role was removed")
@@ -0,0 +1,18 @@
# Generated by Django 4.2.10 on 2024-02-19 05:57

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("agents", "0058_alter_agent_time_zone"),
    ]

    operations = [
        migrations.AlterField(
            model_name="agenthistory",
            name="id",
            field=models.BigAutoField(primary_key=True, serialize=False),
        ),
    ]
@@ -20,7 +20,7 @@ from packaging.version import Version as LooseVersion
from agents.utils import get_agent_url
from checks.models import CheckResult
from core.models import TZ_CHOICES
from core.utils import get_core_settings, send_command_with_mesh
from core.utils import _b64_to_hex, get_core_settings, send_command_with_mesh
from logs.models import BaseAuditModel, DebugLog, PendingAction
from tacticalrmm.constants import (
    AGENT_STATUS_OFFLINE,
@@ -452,6 +452,10 @@ class Agent(BaseAuditModel):
        except:
            return ""

    @property
    def hex_mesh_node_id(self) -> str:
        return _b64_to_hex(self.mesh_node_id)

    @classmethod
    def online_agents(cls, min_version: str = "") -> "List[Agent]":
        if min_version:
@@ -610,6 +614,8 @@
            },
            "run_as_user": run_as_user,
            "env_vars": parsed_env_vars,
            "nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
            "deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
        }

        if history_pk != 0:
@@ -1084,6 +1090,7 @@ class AgentCustomField(models.Model):
class AgentHistory(models.Model):
    objects = PermissionQuerySet.as_manager()

    id = models.BigAutoField(primary_key=True)
    agent = models.ForeignKey(
        Agent,
        related_name="history",
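The new `hex_mesh_node_id` property relies on a `_b64_to_hex` helper imported from `core.utils`, whose body is not part of this comparison. A hypothetical sketch of the conversion it presumably performs (base64 MeshCentral node id rendered as hex), written here only as an illustration:

```python
import base64


def _b64_to_hex(b64: str) -> str:
    # Hypothetical re-implementation for illustration only: pad and decode a
    # (URL-safe, possibly unpadded) base64 string, then render the bytes as hex.
    padded = b64 + "=" * (-len(b64) % 4)
    return base64.urlsafe_b64decode(padded).hex()


# Example with a made-up node id:
print(_b64_to_hex("SGVsbG8"))  # 48656c6c6f
```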
61  api/tacticalrmm/agents/tests/test_agent_save.py  (new file)
@@ -0,0 +1,61 @@
from unittest.mock import patch

from model_bakery import baker

from agents.models import Agent
from tacticalrmm.constants import AgentMonType
from tacticalrmm.test import TacticalTestCase


class AgentSaveTestCase(TacticalTestCase):
    def setUp(self):
        self.client1 = baker.make("clients.Client")
        self.client2 = baker.make("clients.Client")
        self.site1 = baker.make("clients.Site", client=self.client1)
        self.site2 = baker.make("clients.Site", client=self.client2)
        self.site3 = baker.make("clients.Site", client=self.client2)
        self.agent = baker.make(
            "agents.Agent",
            site=self.site1,
            monitoring_type=AgentMonType.SERVER,
        )

    @patch.object(Agent, "set_alert_template")
    def test_set_alert_template_called_on_mon_type_change(
        self, mock_set_alert_template
    ):
        self.agent.monitoring_type = AgentMonType.WORKSTATION
        self.agent.save()
        mock_set_alert_template.assert_called_once()

    @patch.object(Agent, "set_alert_template")
    def test_set_alert_template_called_on_site_change(self, mock_set_alert_template):
        self.agent.site = self.site2
        self.agent.save()
        mock_set_alert_template.assert_called_once()

    @patch.object(Agent, "set_alert_template")
    def test_set_alert_template_called_on_site_and_montype_change(
        self, mock_set_alert_template
    ):
        print(f"before: {self.agent.monitoring_type} site: {self.agent.site_id}")
        self.agent.site = self.site3
        self.agent.monitoring_type = AgentMonType.WORKSTATION
        self.agent.save()
        mock_set_alert_template.assert_called_once()
        print(f"after: {self.agent.monitoring_type} site: {self.agent.site_id}")

    @patch.object(Agent, "set_alert_template")
    def test_set_alert_template_not_called_without_changes(
        self, mock_set_alert_template
    ):
        self.agent.save()
        mock_set_alert_template.assert_not_called()

    @patch.object(Agent, "set_alert_template")
    def test_set_alert_template_not_called_on_non_relevant_field_change(
        self, mock_set_alert_template
    ):
        self.agent.hostname = "abc123"
        self.agent.save()
        mock_set_alert_template.assert_not_called()
@@ -15,6 +15,7 @@ urlpatterns = [
    path("<agent:agent_id>/wmi/", views.WMI.as_view()),
    path("<agent:agent_id>/recover/", views.recover),
    path("<agent:agent_id>/reboot/", views.Reboot.as_view()),
    path("<agent:agent_id>/shutdown/", views.Shutdown.as_view()),
    path("<agent:agent_id>/ping/", views.ping),
    # alias for checks get view
    path("<agent:agent_id>/checks/", GetAddChecks.as_view()),
@@ -21,6 +21,7 @@ from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

from core.tasks import sync_mesh_perms_task
from core.utils import (
    get_core_settings,
    get_mesh_ws_url,
@@ -258,6 +259,7 @@ class GetUpdateDeleteAgent(APIView):
        serializer.is_valid(raise_exception=True)
        serializer.save()

        sync_mesh_perms_task.delay()
        return Response("The agent was updated successfully")

    # uninstall agent
@@ -283,6 +285,7 @@
                message=f"Unable to remove agent {name} from meshcentral database: {e}",
                log_type=DebugLogType.AGENT_ISSUES,
            )
        sync_mesh_perms_task.delay()
        return Response(f"{name} will now be uninstalled.")

@@ -325,13 +328,13 @@ class AgentMeshCentral(APIView):
        agent = get_object_or_404(Agent, agent_id=agent_id)
        core = get_core_settings()

        if not core.mesh_disable_auto_login:
            token = get_login_token(
                key=core.mesh_token, user=f"user//{core.mesh_username}"
            )
            token_param = f"login={token}&"
        else:
            token_param = ""
        user = (
            request.user.mesh_user_id
            if core.sync_mesh_with_trmm
            else f"user//{core.mesh_api_superuser}"
        )
        token = get_login_token(key=core.mesh_token, user=user)
        token_param = f"login={token}&"

        control = f"{core.mesh_site}/?{token_param}gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
        terminal = f"{core.mesh_site}/?{token_param}gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
@@ -491,6 +494,19 @@ def send_raw_cmd(request, agent_id):
    return Response(r)


class Shutdown(APIView):
    permission_classes = [IsAuthenticated, RebootAgentPerms]

    # shutdown
    def post(self, request, agent_id):
        agent = get_object_or_404(Agent, agent_id=agent_id)
        r = asyncio.run(agent.nats_cmd({"func": "shutdown"}, timeout=10))
        if r != "ok":
            return notify_error("Unable to contact the agent")

        return Response("ok")


class Reboot(APIView):
    permission_classes = [IsAuthenticated, RebootAgentPerms]
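To make the reworked auto-login flow concrete, a small sketch of the URL the view now always builds. The host, token, and node id below are made-up placeholder values; the f-string format is the one shown in the hunk above.

```python
# Illustrates the control-URL construction with placeholder values.
mesh_site = "https://mesh.example.com"   # placeholder
token = "abc123"                         # placeholder login token
mesh_node_id = "node-id-here"            # placeholder

token_param = f"login={token}&"
control = f"{mesh_site}/?{token_param}gotonode={mesh_node_id}&viewmode=11&hide=31"
print(control)
# https://mesh.example.com/?login=abc123&gotonode=node-id-here&viewmode=11&hide=31
```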
@@ -971,6 +987,8 @@ def bulk(request):
            debug_info={"ip": request._client_ip},
        )

    ht = "Check the History tab on the agent to view the results."

    if request.data["mode"] == "command":
        if request.data["shell"] == "custom" and request.data["custom_shell"]:
            shell = request.data["custom_shell"]
@@ -985,7 +1003,7 @@
            username=request.user.username[:50],
            run_as_user=request.data["run_as_user"],
        )
        return Response(f"Command will now be run on {len(agents)} agents")
        return Response(f"Command will now be run on {len(agents)} agents. {ht}")

    elif request.data["mode"] == "script":
        script = get_object_or_404(Script, pk=request.data["script"])
@@ -1000,7 +1018,7 @@
            env_vars=request.data["env_vars"],
        )

        return Response(f"{script.name} will now be run on {len(agents)} agents")
        return Response(f"{script.name} will now be run on {len(agents)} agents. {ht}")

    elif request.data["mode"] == "patch":
        if request.data["patchMode"] == "install":
@@ -1429,6 +1429,8 @@ class TestAlertTasks(TacticalTestCase):
            "run_as_user": False,
            "env_vars": ["hello=world", "foo=bar"],
            "id": AgentHistory.objects.last().pk,  # type: ignore
            "nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
            "deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
        }

        nats_cmd.assert_called_with(data, timeout=30, wait=True)
@@ -1460,6 +1462,8 @@
            "run_as_user": False,
            "env_vars": ["resolved=action", "env=vars"],
            "id": AgentHistory.objects.last().pk,  # type: ignore
            "nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
            "deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
        }

        nats_cmd.assert_called_with(data, timeout=35, wait=True)
@@ -22,4 +22,12 @@ def get_agent_config() -> AgentCheckInConfig:
            *getattr(settings, "CHECKIN_SYNCMESH", (800, 1200))
        ),
        limit_data=getattr(settings, "LIMIT_DATA", False),
        install_nushell=getattr(settings, "INSTALL_NUSHELL", False),
        install_nushell_version=getattr(settings, "INSTALL_NUSHELL_VERSION", ""),
        install_nushell_url=getattr(settings, "INSTALL_NUSHELL_URL", ""),
        nushell_enable_config=getattr(settings, "NUSHELL_ENABLE_CONFIG", False),
        install_deno=getattr(settings, "INSTALL_DENO", False),
        install_deno_version=getattr(settings, "INSTALL_DENO_VERSION", ""),
        install_deno_url=getattr(settings, "INSTALL_DENO_URL", ""),
        deno_default_permissions=getattr(settings, "DENO_DEFAULT_PERMISSIONS", ""),
    )
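Each new field follows the same pattern: read an optional Django setting and fall back to a safe default, so nushell/deno installation is opt-in from local settings. A minimal standalone sketch of that fallback behavior (the `SimpleNamespace` stands in for `django.conf.settings` purely for illustration):

```python
from types import SimpleNamespace

# Stand-in for django.conf.settings, to show only the fallback behavior.
settings = SimpleNamespace(INSTALL_NUSHELL=True)  # INSTALL_DENO intentionally not set

print(getattr(settings, "INSTALL_NUSHELL", False))  # True  (explicitly enabled)
print(getattr(settings, "INSTALL_DENO", False))     # False (falls back to the default)
```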
@@ -20,6 +20,7 @@ from autotasks.serializers import TaskGOGetSerializer, TaskResultSerializer
from checks.constants import CHECK_DEFER, CHECK_RESULT_DEFER
from checks.models import Check, CheckResult
from checks.serializers import CheckRunnerGetSerializer
from core.tasks import sync_mesh_perms_task
from core.utils import (
    download_mesh_agent,
    get_core_settings,
@@ -31,6 +32,8 @@ from logs.models import DebugLog, PendingAction
from software.models import InstalledSoftware
from tacticalrmm.constants import (
    AGENT_DEFER,
    TRMM_MAX_REQUEST_SIZE,
    AgentHistoryType,
    AgentMonType,
    AgentPlat,
    AuditActionType,
@@ -338,6 +341,12 @@ class TaskRunner(APIView):
            AutomatedTask.objects.select_related("custom_field"), pk=pk
        )

        content_length = request.META.get("CONTENT_LENGTH")
        if content_length and int(content_length) > TRMM_MAX_REQUEST_SIZE:
            request.data["stdout"] = ""
            request.data["stderr"] = "Content truncated due to excessive request size."
            request.data["retcode"] = 1

        # get task result or create if doesn't exist
        try:
            task_result = (
@@ -356,7 +365,7 @@

        AgentHistory.objects.create(
            agent=agent,
            type=AuditActionType.TASK_RUN,
            type=AgentHistoryType.TASK_RUN,
            command=task.name,
            script_results=request.data,
        )
@@ -481,6 +490,7 @@ class NewAgent(APIView):
        )

        ret = {"pk": agent.pk, "token": token.key}
        sync_mesh_perms_task.delay()
        return Response(ret)


@@ -559,6 +569,15 @@ class AgentHistoryResult(APIView):
    permission_classes = [IsAuthenticated]

    def patch(self, request, agentid, pk):
        content_length = request.META.get("CONTENT_LENGTH")
        if content_length and int(content_length) > TRMM_MAX_REQUEST_SIZE:

            request.data["script_results"]["stdout"] = ""
            request.data["script_results"][
                "stderr"
            ] = "Content truncated due to excessive request size."
            request.data["script_results"]["retcode"] = 1

        hist = get_object_or_404(
            AgentHistory.objects.filter(agent__agent_id=agentid), pk=pk
        )
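The same oversized-payload guard appears in both `TaskRunner.patch` and `AgentHistoryResult.patch`: compare the request's `CONTENT_LENGTH` header against `TRMM_MAX_REQUEST_SIZE` and, if it is exceeded, blank the output and record a non-zero return code instead of storing the payload. A standalone sketch of that check; the constant value here is a made-up example, the real one lives in `tacticalrmm.constants`:

```python
TRMM_MAX_REQUEST_SIZE = 5_000_000  # example value only


def truncate_if_oversized(meta: dict, results: dict) -> dict:
    # Mirrors the guard used in the two views above.
    content_length = meta.get("CONTENT_LENGTH")
    if content_length and int(content_length) > TRMM_MAX_REQUEST_SIZE:
        results["stdout"] = ""
        results["stderr"] = "Content truncated due to excessive request size."
        results["retcode"] = 1
    return results


print(truncate_if_oversized({"CONTENT_LENGTH": "9999999"}, {"stdout": "x" * 10, "retcode": 0}))
# {'stdout': '', 'retcode': 1, 'stderr': 'Content truncated due to excessive request size.'}
```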
@@ -47,7 +47,7 @@ class Policy(BaseAuditModel):
        old_policy: Optional[Policy] = (
            type(self).objects.get(pk=self.pk) if self.pk else None
        )
        super(Policy, self).save(old_model=old_policy, *args, **kwargs)
        super().save(old_model=old_policy, *args, **kwargs)

        # check if alert template was changes and cache on agents
        if old_policy:
@@ -68,10 +68,7 @@
        cache.delete_many_pattern("site_server_*")
        cache.delete_many_pattern("agent_*")

        super(Policy, self).delete(
            *args,
            **kwargs,
        )
        super().delete(*args, **kwargs)

    def __str__(self) -> str:
        return self.name
@@ -126,7 +126,7 @@ class TestPolicyViews(TacticalTestCase):
        resp = self.client.put(url, data, format="json")
        self.assertEqual(resp.status_code, 200)

        cache_alert_template.called_once()
        cache_alert_template.assert_called_once()

        self.check_not_authenticated("put", url)
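Many of the model changes in this comparison are the same mechanical refactor: `super(CurrentClass, self).save(...)` becomes the zero-argument `super().save(...)`, which inside the class body is equivalent in Python 3. (The earlier `Role.save` change is the one to watch separately, since it previously passed `BaseAuditModel` explicitly and therefore skipped a level of the MRO.) The test tweak in the same hunk swaps `called_once()` — a no-op attribute access on a `Mock` — for `assert_called_once()`, which actually asserts. A minimal illustration of the `super()` equivalence:

```python
class Base:
    def save(self):
        return "Base.save"


class Policy(Base):
    def save_old(self):
        # explicit form used before this changeset
        return super(Policy, self).save()

    def save_new(self):
        # zero-argument form used after; identical MRO lookup in Python 3
        return super().save()


p = Policy()
assert p.save_old() == p.save_new() == "Base.save"
```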
@@ -0,0 +1,18 @@
# Generated by Django 4.2.10 on 2024-02-19 05:57

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("autotasks", "0039_alter_automatedtask_task_type"),
    ]

    operations = [
        migrations.AlterField(
            model_name="taskresult",
            name="id",
            field=models.BigAutoField(primary_key=True, serialize=False),
        ),
    ]
@@ -151,7 +151,7 @@ class AutomatedTask(BaseAuditModel):

        # get old task if exists
        old_task = AutomatedTask.objects.get(pk=self.pk) if self.pk else None
        super(AutomatedTask, self).save(old_model=old_task, *args, **kwargs)
        super().save(old_model=old_task, *args, **kwargs)

        # check if fields were updated that require a sync to the agent and set status to notsynced
        if old_task:
@@ -174,10 +174,7 @@
        cache.delete_many_pattern("site_*_tasks")
        cache.delete_many_pattern("agent_*_tasks")

        super(AutomatedTask, self).delete(
            *args,
            **kwargs,
        )
        super().delete(*args, **kwargs)

    @property
    def schedule(self) -> Optional[str]:
@@ -470,6 +467,7 @@ class TaskResult(models.Model):

    objects = PermissionQuerySet.as_manager()

    id = models.BigAutoField(primary_key=True)
    agent = models.ForeignKey(
        "agents.Agent",
        related_name="taskresults",
@@ -2,6 +2,7 @@ from datetime import datetime

from django.utils import timezone as djangotime
from rest_framework import serializers
from django.conf import settings

from scripts.models import Script
from tacticalrmm.constants import TaskType
@@ -257,6 +258,8 @@ class TaskGOGetSerializer(serializers.ModelSerializer):
                    shell=script.shell,
                    env_vars=env_vars,
                ),
                "nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
                "deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
            }
        )
        if actions_to_remove:
@@ -0,0 +1,23 @@
# Generated by Django 4.2.10 on 2024-02-19 05:57

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("checks", "0031_check_env_vars"),
    ]

    operations = [
        migrations.AlterField(
            model_name="checkhistory",
            name="id",
            field=models.BigAutoField(primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name="checkresult",
            name="id",
            field=models.BigAutoField(primary_key=True, serialize=False),
        ),
    ]
@@ -168,10 +168,7 @@ class Check(BaseAuditModel):
        elif self.agent:
            cache.delete(f"agent_{self.agent.agent_id}_checks")

        super(Check, self).save(
            *args,
            **kwargs,
        )
        super().save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        # if check is a policy check clear cache on everything
@@ -183,10 +180,7 @@
        elif self.agent:
            cache.delete(f"agent_{self.agent.agent_id}_checks")

        super(Check, self).delete(
            *args,
            **kwargs,
        )
        super().delete(*args, **kwargs)

    @property
    def readable_desc(self):
@@ -290,6 +284,7 @@ class CheckResult(models.Model):
    class Meta:
        unique_together = (("agent", "assigned_check"),)

    id = models.BigAutoField(primary_key=True)
    agent = models.ForeignKey(
        "agents.Agent",
        related_name="checkresults",
@@ -338,10 +333,7 @@
        ):
            self.alert_severity = AlertSeverity.WARNING

        super(CheckResult, self).save(
            *args,
            **kwargs,
        )
        super().save(*args, **kwargs)

    @property
    def history_info(self):
@@ -673,6 +665,7 @@
class CheckHistory(models.Model):
    objects = PermissionQuerySet.as_manager()

    id = models.BigAutoField(primary_key=True)
    check_id = models.PositiveIntegerField(default=0)
    agent_id = models.CharField(max_length=200, null=True, blank=True)
    x = models.DateTimeField(auto_now_add=True)
@@ -49,11 +49,7 @@ class Client(BaseAuditModel):

        # get old client if exists
        old_client = Client.objects.get(pk=self.pk) if self.pk else None
        super(Client, self).save(
            old_model=old_client,
            *args,
            **kwargs,
        )
        super().save(old_model=old_client, *args, **kwargs)

        # check if polcies have changed and initiate task to reapply policies if so
        if old_client and (
@@ -129,11 +125,7 @@ class Site(BaseAuditModel):

        # get old client if exists
        old_site = Site.objects.get(pk=self.pk) if self.pk else None
        super(Site, self).save(
            old_model=old_site,
            *args,
            **kwargs,
        )
        super().save(old_model=old_site, *args, **kwargs)

        # check if polcies have changed and initiate task to reapply policies if so
        if old_site:
@@ -88,6 +88,7 @@ class TestClientViews(TacticalTestCase):
            "client": {"name": "Setup Client"},
            "site": {"name": "Setup Site"},
            "timezone": "America/Los_Angeles",
            "companyname": "TestCo Inc.",
            "initialsetup": True,
        }
        r = self.client.post(url, payload, format="json")

@@ -92,7 +92,8 @@ class GetAddClients(APIView):
        if "initialsetup" in request.data.keys():
            core = get_core_settings()
            core.default_time_zone = request.data["timezone"]
            core.save(update_fields=["default_time_zone"])
            core.mesh_company_name = request.data["companyname"]
            core.save(update_fields=["default_time_zone", "mesh_company_name"])

        # save custom fields
        if "custom_fields" in request.data.keys():
@@ -41,6 +41,7 @@ agentBin="${agentBinPath}/${binName}"
agentConf='/etc/tacticalagent'
agentSvcName='tacticalagent.service'
agentSysD="/etc/systemd/system/${agentSvcName}"
agentDir='/opt/tacticalagent'
meshDir='/opt/tacticalmesh'
meshSystemBin="${meshDir}/meshagent"
meshSvcName='meshagent.service'
@@ -65,16 +66,20 @@ RemoveOldAgent() {
    if [ -f "${agentSysD}" ]; then
        systemctl disable ${agentSvcName}
        systemctl stop ${agentSvcName}
        rm -f ${agentSysD}
        rm -f "${agentSysD}"
        systemctl daemon-reload
    fi

    if [ -f "${agentConf}" ]; then
        rm -f ${agentConf}
        rm -f "${agentConf}"
    fi

    if [ -f "${agentBin}" ]; then
        rm -f ${agentBin}
        rm -f "${agentBin}"
    fi

    if [ -d "${agentDir}" ]; then
        rm -rf "${agentDir}"
    fi
}

@@ -132,16 +137,18 @@ Uninstall() {
    RemoveOldAgent
}

if [ $# -ne 0 ] && [ $1 == 'uninstall' ]; then
if [ $# -ne 0 ] && [[ $1 =~ ^(uninstall|-uninstall|--uninstall)$ ]]; then
    Uninstall
    # Remove the current script
    rm "$0"
    exit 0
fi

while [[ "$#" -gt 0 ]]; do
    case $1 in
    --debug) DEBUG=1 ;;
    --insecure) INSECURE=1 ;;
    --nomesh) NOMESH=1 ;;
    -debug | --debug | debug) DEBUG=1 ;;
    -insecure | --insecure | insecure) INSECURE=1 ;;
    -nomesh | --nomesh | nomesh) NOMESH=1 ;;
    *)
        echo "ERROR: Unknown parameter: $1"
        exit 1
@@ -27,7 +27,7 @@ class Command(BaseCommand):
        self._warning("Mesh device group:", core.mesh_device_group)

        try:
            token = get_auth_token(core.mesh_username, core.mesh_token)
            token = get_auth_token(core.mesh_api_superuser, core.mesh_token)
        except Exception as e:
            self._error("Error getting auth token:")
            self._error(str(e))
@@ -5,6 +5,7 @@ from tacticalrmm.constants import (
    AGENT_OUTAGES_LOCK,
    ORPHANED_WIN_TASK_LOCK,
    RESOLVE_ALERTS_LOCK,
    SYNC_MESH_PERMS_TASK_LOCK,
    SYNC_SCHED_TASK_LOCK,
)

@@ -18,5 +19,6 @@ class Command(BaseCommand):
            ORPHANED_WIN_TASK_LOCK,
            RESOLVE_ALERTS_LOCK,
            SYNC_SCHED_TASK_LOCK,
            SYNC_MESH_PERMS_TASK_LOCK,
        ):
            cache.delete(key)
@@ -5,13 +5,14 @@ import websockets
from django.core.management.base import BaseCommand

from core.utils import get_mesh_ws_url
from tacticalrmm.constants import TRMM_WS_MAX_SIZE


class Command(BaseCommand):
    help = "Sets up initial mesh central configuration"

    async def websocket_call(self, uri):
        async with websockets.connect(uri) as websocket:
        async with websockets.connect(uri, max_size=TRMM_WS_MAX_SIZE) as websocket:
            # Get Invitation Link
            await websocket.send(
                json.dumps(
@@ -0,0 +1,19 @@
from django.core.management.base import BaseCommand
from meshctrl.utils import get_login_token

from core.utils import get_core_settings


class Command(BaseCommand):
    help = "generate a url to login to mesh as the superuser"

    def handle(self, *args, **kwargs):
        core = get_core_settings()

        token = get_login_token(key=core.mesh_token, user=f"user//{core.mesh_username}")
        token_param = f"login={token}&"

        control = f"{core.mesh_site}/?{token_param}"

        self.stdout.write(self.style.SUCCESS(control))
@@ -6,13 +6,14 @@ from django.conf import settings
from django.core.management.base import BaseCommand

from core.utils import get_core_settings, get_mesh_ws_url
from tacticalrmm.constants import TRMM_WS_MAX_SIZE


class Command(BaseCommand):
    help = "Sets up initial mesh central configuration"

    async def websocket_call(self, uri):
        async with websockets.connect(uri) as websocket:
        async with websockets.connect(uri, max_size=TRMM_WS_MAX_SIZE) as websocket:
            # Get Device groups to see if it exists
            await websocket.send(json.dumps({"action": "meshes"}))
@@ -6,6 +6,8 @@ from accounts.models import User
from agents.models import Agent
from autotasks.models import AutomatedTask
from checks.models import Check, CheckHistory
from core.models import CoreSettings
from core.tasks import remove_orphaned_history_results, sync_mesh_perms_task
from scripts.models import Script
from tacticalrmm.constants import AGENT_DEFER, ScriptType

@@ -54,4 +56,22 @@ class Command(BaseCommand):

            agent.save(update_fields=["goarch"])

        self.stdout.write(
            self.style.SUCCESS("Checking for orphaned history results...")
        )
        count = remove_orphaned_history_results()
        if count:
            self.stdout.write(
                self.style.SUCCESS(f"Removed {count} orphaned history results.")
            )

        core = CoreSettings.objects.first()
        if core.sync_mesh_with_trmm:
            self.stdout.write(
                self.style.SUCCESS(
                    "Syncing trmm users/permissions with meshcentral, this might take a long time...please wait..."
                )
            )
            sync_mesh_perms_task()

        self.stdout.write("Post update tasks finished")
@@ -8,6 +8,7 @@ from core.tasks import (
    core_maintenance_tasks,
    resolve_alerts_task,
    resolve_pending_actions,
    sync_mesh_perms_task,
    sync_scheduled_tasks,
)
from winupdate.tasks import auto_approve_updates_task, check_agent_update_schedule_task
@@ -28,3 +29,4 @@ class Command(BaseCommand):
        remove_orphaned_win_tasks.delay()
        auto_approve_updates_task.delay()
        check_agent_update_schedule_task.delay()
        sync_mesh_perms_task.delay()
@@ -0,0 +1,15 @@
from django.core.management.base import BaseCommand

from core.tasks import sync_mesh_perms_task


class Command(BaseCommand):
    help = "Sync mesh users/perms with trmm users/perms"

    def handle(self, *args, **kwargs):
        self.stdout.write(
            self.style.SUCCESS(
                "Syncing trmm users/permissions with meshcentral, this might take a long time...please wait..."
            )
        )
        sync_mesh_perms_task()
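Note the two invocation styles used throughout this changeset: the management commands above call `sync_mesh_perms_task()` directly, which runs the sync synchronously in the current process, while the API views and the periodic-task command enqueue it with `sync_mesh_perms_task.delay()` so the work happens on the Celery worker.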
183  api/tacticalrmm/core/mesh_utils.py  (new file)
@@ -0,0 +1,183 @@
import asyncio
import json
import re
import secrets
import string
import traceback
from typing import TYPE_CHECKING, Any

import websockets

from accounts.utils import is_superuser
from tacticalrmm.constants import TRMM_WS_MAX_SIZE
from tacticalrmm.logger import logger

if TYPE_CHECKING:
    from accounts.models import User


def build_mesh_display_name(
    *, first_name: str | None, last_name: str | None, company_name: str | None
) -> str:
    ret = ""
    if first_name:
        ret += first_name

    if last_name:
        ret += f" {last_name}"

    if ret and company_name:
        ret += f" - {company_name}"
    elif company_name:
        ret += company_name

    return ret


def has_mesh_perms(*, user: "User") -> bool:
    if user.is_superuser or is_superuser(user):
        return True

    return user.role and getattr(user.role, "can_use_mesh")


def make_mesh_password() -> str:
    alpha = string.ascii_letters + string.digits
    nonalpha = "!@#$"
    passwd = [secrets.choice(alpha) for _ in range(29)] + [secrets.choice(nonalpha)]
    secrets.SystemRandom().shuffle(passwd)
    return "".join(passwd)


def transform_trmm(obj):
    ret = []
    try:
        for node in obj:
            node_id = node["node_id"]
            user_ids = [link["_id"] for link in node["links"]]
            ret.append({"node_id": node_id, "user_ids": user_ids})
    except Exception:
        logger.debug(traceback.format_exc)
    return ret


def transform_mesh(obj):
    pattern = re.compile(r".*___\d+")
    ret = []
    try:
        for _, nodes in obj.items():
            for node in nodes:
                node_id = node["_id"]
                try:
                    user_ids = [
                        user_id
                        for user_id in node["links"].keys()
                        if pattern.match(user_id)
                    ]
                except KeyError:
                    # will trigger on initial sync cuz no mesh users yet
                    # also triggers for invalid agents after sync
                    pass
                else:
                    ret.append({"node_id": node_id, "user_ids": user_ids})

    except Exception:
        logger.debug(traceback.format_exc)
    return ret


class MeshSync:
    def __init__(self, uri: str):
        self.uri = uri
        self.mesh_users = self.get_trmm_mesh_users()  # full list

    def mesh_action(
        self, *, payload: dict[str, Any], wait=True
    ) -> dict[str, Any] | None:
        async def _do(payload):
            async with websockets.connect(self.uri, max_size=TRMM_WS_MAX_SIZE) as ws:
                await ws.send(json.dumps(payload))
                if wait:
                    while 1:
                        try:
                            message = await asyncio.wait_for(ws.recv(), 120)
                            r = json.loads(message)
                            if r["action"] == payload["action"]:
                                return r
                        except asyncio.TimeoutError:
                            logger.error("Timeout reached.")
                            return None
                else:
                    return None

        payload["responseid"] = "meshctrl"
        logger.debug(payload)

        return asyncio.run(_do(payload))

    def get_unique_mesh_users(
        self, trmm_agents_list: list[dict[str, Any]]
    ) -> list[str]:
        userids = [i["links"] for i in trmm_agents_list]
        all_ids = [item["_id"] for sublist in userids for item in sublist]
        return list(set(all_ids))

    def get_trmm_mesh_users(self):
        payload = {"action": "users"}
        ret = {
            i["_id"]: i
            for i in self.mesh_action(payload=payload, wait=True)["users"]
            if re.search(r".*___\d+", i["_id"])
        }
        return ret

    def add_users_to_node(self, *, node_id: str, user_ids: list[str]):
        payload = {
            "action": "adddeviceuser",
            "nodeid": node_id,
            "usernames": [s.replace("user//", "") for s in user_ids],
            "rights": 4088024,
            "remove": False,
        }
        self.mesh_action(payload=payload, wait=False)

    def delete_users_from_node(self, *, node_id: str, user_ids: list[str]):
        payload = {
            "action": "adddeviceuser",
            "nodeid": node_id,
            "userids": user_ids,
            "rights": 0,
            "remove": True,
        }
        self.mesh_action(payload=payload, wait=False)

    def update_mesh_displayname(self, *, user_info: dict[str, Any]) -> None:
        payload = {
            "action": "edituser",
            "id": user_info["_id"],
            "realname": user_info["full_name"],
        }
        self.mesh_action(payload=payload, wait=False)

    def add_user_to_mesh(self, *, user_info: dict[str, Any]) -> None:
        payload = {
            "action": "adduser",
            "username": user_info["username"],
            "email": user_info["email"],
            "pass": make_mesh_password(),
            "resetNextLogin": False,
            "randomPassword": False,
            "removeEvents": False,
            "emailVerified": True,
        }
        self.mesh_action(payload=payload, wait=False)
        if user_info["full_name"]:
            self.update_mesh_displayname(user_info=user_info)

    def delete_user_from_mesh(self, *, mesh_user_id: str) -> None:
        payload = {
            "action": "deleteuser",
            "userid": mesh_user_id,
        }
        self.mesh_action(payload=payload, wait=False)
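`transform_trmm` and `transform_mesh` reduce the two sides to the same shape — a list of `{"node_id", "user_ids"}` dicts — so the sync task can compare them with plain set arithmetic. A small illustration with made-up node and user ids, intended to be run in a configured Django shell for the project:

```python
from core.mesh_utils import build_mesh_display_name, transform_mesh, transform_trmm

# Toy inputs in the shapes the two transforms expect (all ids are made up).
trmm_nodes = [
    {"node_id": "node//aa11", "links": [{"_id": "user//janedoe___7"}]},
]
mesh_nodes = {
    "mesh//group1": [
        {"_id": "node//aa11", "links": {"user//janedoe___7": {}, "user//admin": {}}},
    ],
}

print(transform_trmm(trmm_nodes))
# [{'node_id': 'node//aa11', 'user_ids': ['user//janedoe___7']}]
print(transform_mesh(mesh_nodes))
# [{'node_id': 'node//aa11', 'user_ids': ['user//janedoe___7']}]  ("user//admin" fails the ___<pk> pattern)

print(build_mesh_display_name(first_name="Jane", last_name="Doe", company_name="Acme"))
# Jane Doe - Acme
```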
@@ -0,0 +1,18 @@
# Generated by Django 4.2.10 on 2024-02-20 02:51

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("core", "0041_auto_20240128_0301"),
    ]

    operations = [
        migrations.AddField(
            model_name="coresettings",
            name="mesh_company_name",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 4.2.10 on 2024-02-23 19:01

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("core", "0042_coresettings_mesh_company_name"),
    ]

    operations = [
        migrations.AddField(
            model_name="coresettings",
            name="sync_mesh_with_trmm",
            field=models.BooleanField(default=True),
        ),
    ]
@@ -0,0 +1,17 @@
# Generated by Django 4.2.11 on 2024-03-12 05:23

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("core", "0043_coresettings_sync_mesh_with_trmm"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="coresettings",
            name="mesh_disable_auto_login",
        ),
    ]
@@ -1,7 +1,7 @@
import smtplib
from contextlib import suppress
from email.message import EmailMessage
from email.headerregistry import Address
from email.message import EmailMessage
from typing import TYPE_CHECKING, List, Optional, cast

import requests
@@ -74,7 +74,8 @@ class CoreSettings(BaseAuditModel):
    mesh_device_group = models.CharField(
        max_length=255, null=True, blank=True, default="TacticalRMM"
    )
    mesh_disable_auto_login = models.BooleanField(default=False)
    mesh_company_name = models.CharField(max_length=255, null=True, blank=True)
    sync_mesh_with_trmm = models.BooleanField(default=True)
    agent_auto_update = models.BooleanField(default=True)
    workstation_policy = models.ForeignKey(
        "automation.Policy",
@@ -121,7 +122,7 @@
            self.mesh_token = settings.MESH_TOKEN_KEY

        old_settings = type(self).objects.get(pk=self.pk) if self.pk else None
        super(BaseAuditModel, self).save(*args, **kwargs)
        super().save(*args, **kwargs)

        if old_settings:
            if (
@@ -146,6 +147,11 @@
    def __str__(self) -> str:
        return "Global Site Settings"

    @property
    def mesh_api_superuser(self) -> str:
        # must be lowercase otherwise mesh api breaks
        return self.mesh_username.lower()

    @property
    def sms_is_configured(self) -> bool:
        return all(
@property
|
||||
def sms_is_configured(self) -> bool:
|
||||
return all(
|
||||
@@ -365,7 +371,7 @@ class CodeSignToken(models.Model):
|
||||
if not self.pk and CodeSignToken.objects.exists():
|
||||
raise ValidationError("There can only be one CodeSignToken instance")
|
||||
|
||||
super(CodeSignToken, self).save(*args, **kwargs)
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def is_valid(self) -> bool:
|
||||
|
||||
@@ -13,7 +13,11 @@ class CoreSettingsPerms(permissions.BasePermission):
|
||||
|
||||
class URLActionPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
return _has_perm(r, "can_run_urlactions")
|
||||
if r.method in {"GET", "PATCH"}:
|
||||
return _has_perm(r, "can_run_urlactions")
|
||||
|
||||
# TODO make a manage url action perm instead?
|
||||
return _has_perm(r, "can_edit_core_settings")
|
||||
|
||||
|
||||
class ServerMaintPerms(permissions.BasePermission):
|
||||
|
||||
@@ -1,3 +1,4 @@
from django.conf import settings
from rest_framework import serializers

from tacticalrmm.constants import ALL_TIMEZONES
@@ -5,7 +6,19 @@ from tacticalrmm.constants import ALL_TIMEZONES
from .models import CodeSignToken, CoreSettings, CustomField, GlobalKVStore, URLAction


class CoreSettingsSerializer(serializers.ModelSerializer):
class HostedCoreMixin:
    def to_representation(self, instance):
        ret = super().to_representation(instance)  # type: ignore
        if getattr(settings, "HOSTED", False):
            for field in ("mesh_site", "mesh_token", "mesh_username"):
                ret[field] = "n/a"

            ret["sync_mesh_with_trmm"] = True

        return ret


class CoreSettingsSerializer(HostedCoreMixin, serializers.ModelSerializer):
    all_timezones = serializers.SerializerMethodField("all_time_zones")

    def all_time_zones(self, obj):
@@ -17,7 +30,7 @@ class CoreSettingsSerializer(serializers.ModelSerializer):


# for audting
class CoreSerializer(serializers.ModelSerializer):
class CoreSerializer(HostedCoreMixin, serializers.ModelSerializer):
    class Meta:
        model = CoreSettings
        fields = "__all__"
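The mixin only rewrites the serialized output; the stored values are untouched. Under `HOSTED=True` (mirroring the assertions in the hosted test further down), a serialized settings payload comes back roughly like the sketch below. The field values are illustrative, taken from the test data:

```python
# Illustrative before/after of HostedCoreMixin.to_representation under HOSTED=True.
ret = {
    "smtp_from_email": "newexample1@example.com",
    "mesh_site": "https://mesh15534.example.com",
    "mesh_token": "abc123",
    "mesh_username": "jane",
    "sync_mesh_with_trmm": False,
}

for field in ("mesh_site", "mesh_token", "mesh_username"):
    ret[field] = "n/a"
ret["sync_mesh_with_trmm"] = True

print(ret)
# {'smtp_from_email': 'newexample1@example.com', 'mesh_site': 'n/a',
#  'mesh_token': 'n/a', 'mesh_username': 'n/a', 'sync_mesh_with_trmm': True}
```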
@@ -1,24 +1,36 @@
import asyncio
import logging
import traceback
from contextlib import suppress
from time import sleep
from typing import TYPE_CHECKING, Any

import nats
from django.conf import settings
from django.db import transaction
from django.db.models import Prefetch
from django.db.utils import DatabaseError
from django.utils import timezone as djangotime
from packaging import version as pyver

from accounts.models import User
from accounts.utils import is_superuser
from agents.models import Agent
from agents.tasks import clear_faults_task, prune_agent_history
from alerts.models import Alert
from alerts.tasks import prune_resolved_alerts
from autotasks.models import AutomatedTask, TaskResult
from checks.models import Check, CheckResult
from checks.models import Check, CheckHistory, CheckResult
from checks.tasks import prune_check_history
from clients.models import Client, Site
from core.utils import get_core_settings
from core.mesh_utils import (
    MeshSync,
    build_mesh_display_name,
    has_mesh_perms,
    transform_mesh,
    transform_trmm,
)
from core.models import CoreSettings
from core.utils import get_core_settings, get_mesh_ws_url, make_alpha_numeric
from logs.models import PendingAction
from logs.tasks import prune_audit_log, prune_debug_log
from tacticalrmm.celery import app
@@ -27,6 +39,7 @@ from tacticalrmm.constants import (
    AGENT_STATUS_ONLINE,
    AGENT_STATUS_OVERDUE,
    RESOLVE_ALERTS_LOCK,
    SYNC_MESH_PERMS_TASK_LOCK,
    SYNC_SCHED_TASK_LOCK,
    AlertSeverity,
    AlertType,
@@ -36,15 +49,34 @@ from tacticalrmm.constants import (
    TaskSyncStatus,
    TaskType,
)
from tacticalrmm.helpers import setup_nats_options
from tacticalrmm.helpers import make_random_password, setup_nats_options
from tacticalrmm.logger import logger
from tacticalrmm.nats_utils import a_nats_cmd
from tacticalrmm.permissions import _has_perm_on_agent
from tacticalrmm.utils import redis_lock

if TYPE_CHECKING:
    from django.db.models import QuerySet
    from nats.aio.client import Client as NATSClient

logger = logging.getLogger("trmm")


def remove_orphaned_history_results() -> int:
    try:
        with transaction.atomic():
            check_hist_agentids = CheckHistory.objects.values_list(
                "agent_id", flat=True
            ).distinct()
            current_agentids = set(Agent.objects.values_list("agent_id", flat=True))
            orphaned_agentids = [
                i for i in check_hist_agentids if i not in current_agentids
            ]
            count, _ = CheckHistory.objects.filter(
                agent_id__in=orphaned_agentids
            ).delete()
            return count
    except Exception as e:
        logger.error(str(e))
        return 0


@app.task
@@ -53,6 +85,8 @@ def core_maintenance_tasks() -> None:
        remove_if_not_scheduled=True, expire_date__lt=djangotime.now()
    ).delete()

    remove_orphaned_history_results()

    core = get_core_settings()

    # remove old CheckHistory data
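`remove_orphaned_history_results` keeps `CheckHistory` rows only for agent ids that still exist; everything else is deleted in one query inside a transaction. The selection logic in isolation, on made-up ids:

```python
# The core of the orphan detection above, on toy data.
check_hist_agentids = ["agent-1", "agent-2", "agent-3"]   # distinct agent_id values in CheckHistory
current_agentids = {"agent-1", "agent-3"}                 # agent_id values of agents that still exist

orphaned_agentids = [i for i in check_hist_agentids if i not in current_agentids]
print(orphaned_agentids)  # ['agent-2']  -> CheckHistory rows with these agent_ids get deleted
```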
@@ -361,3 +395,172 @@ def cache_db_fields_task() -> None:
        agents = qs.filter(site__client=client)
        client.failing_checks = _get_failing_data(agents)
        client.save(update_fields=["failing_checks"])


@app.task(bind=True)
def sync_mesh_perms_task(self):
    with redis_lock(SYNC_MESH_PERMS_TASK_LOCK, self.app.oid) as acquired:
        if not acquired:
            return f"{self.app.oid} still running"

        try:
            core = CoreSettings.objects.first()
            do_not_sync = not core.sync_mesh_with_trmm
            uri = get_mesh_ws_url()
            ms = MeshSync(uri)

            if do_not_sync:
                for user in ms.mesh_users:
                    ms.delete_user_from_mesh(mesh_user_id=user)

                return

            company_name = core.mesh_company_name
            mnp = {"action": "nodes"}
            mesh_nodes_raw = ms.mesh_action(payload=mnp, wait=True)["nodes"]

            users = User.objects.select_related("role").filter(
                agent=None,
                is_installer_user=False,
                is_active=True,
                block_dashboard_login=False,
            )

            trmm_agents_meshnodeids = [
                f"node//{i.hex_mesh_node_id}"
                for i in Agent.objects.only("mesh_node_id")
                if i.mesh_node_id
            ]

            mesh_users_dict = {}
            for user in users:
                full_name = build_mesh_display_name(
                    first_name=user.first_name,
                    last_name=user.last_name,
                    company_name=company_name,
                )

                # mesh user creation will fail if same email exists for another user
                # make sure that doesn't happen by making a random email
                rand_str1 = make_random_password(len=6)
                rand_str2 = make_random_password(len=5)
                # for trmm users whos usernames are emails
                email_prefix = make_alpha_numeric(user.username)
                email = f"{email_prefix}.{rand_str1}@tacticalrmm-do-not-change-{rand_str2}.local"
                mesh_users_dict[user.mesh_user_id] = {
                    "_id": user.mesh_user_id,
                    "username": user.mesh_username,
                    "full_name": full_name,
                    "email": email,
                }

            new_trmm_agents = []
            for agent in Agent.objects.defer(*AGENT_DEFER):
                if not agent.mesh_node_id:
                    continue
                agent_dict = {
                    "node_id": f"node//{agent.hex_mesh_node_id}",
                    "hostname": agent.hostname,
                }
                tmp: list[dict[str, str]] = []
                for user in users:
                    if not has_mesh_perms(user=user):
                        logger.debug(f"No mesh perms for {user} on {agent.hostname}")
                        continue

                    if (user.is_superuser or is_superuser(user)) or _has_perm_on_agent(
                        user, agent.agent_id
                    ):
                        tmp.append({"_id": user.mesh_user_id})

                agent_dict["links"] = tmp
                new_trmm_agents.append(agent_dict)

            final_trmm = transform_trmm(new_trmm_agents)
            final_mesh = transform_mesh(mesh_nodes_raw)

            # delete users first
            source_users_global = set()
            for item in final_trmm:
                source_users_global.update(item["user_ids"])

            target_users_global = set()
            for item in final_mesh:
                target_users_global.update(item["user_ids"])

            # identify and create new users
            new_users = list(source_users_global - target_users_global)
            for user_id in new_users:
                user_info = mesh_users_dict[user_id]
                logger.info(f"Adding new user {user_info['username']} to mesh")
                ms.add_user_to_mesh(user_info=user_info)

            users_to_delete_globally = list(target_users_global - source_users_global)
            for user_id in users_to_delete_globally:
                logger.info(f"Deleting {user_id} from mesh")
                ms.delete_user_from_mesh(mesh_user_id=user_id)

            source_map = {item["node_id"]: set(item["user_ids"]) for item in final_trmm}
            target_map = {item["node_id"]: set(item["user_ids"]) for item in final_mesh}

            def _get_sleep_after_n_inter(n):
                # {number of agents: chunk size}
                thresholds = {250: 150, 500: 275, 800: 300, 1000: 340}
                for threshold, value in sorted(thresholds.items()):
                    if n <= threshold:
                        return value

                return 375

            iter_count = 0
            sleep_after = _get_sleep_after_n_inter(len(source_map))

            for node_id, source_users in source_map.items():
                # skip agents without valid node id
                if node_id not in trmm_agents_meshnodeids:
                    continue

                target_users = target_map.get(node_id, set()) - set(
                    users_to_delete_globally
                )
                source_users_adjusted = source_users - set(users_to_delete_globally)

                # find users that need to be added or deleted
                users_to_add = list(source_users_adjusted - target_users)
                users_to_delete = list(target_users - source_users_adjusted)

                if users_to_add or users_to_delete:
                    iter_count += 1

                    if users_to_add:
                        logger.info(f"Adding {users_to_add} to {node_id}")
                        ms.add_users_to_node(node_id=node_id, user_ids=users_to_add)

                    if users_to_delete:
                        logger.info(f"Deleting {users_to_delete} from {node_id}")
                        ms.delete_users_from_node(node_id=node_id, user_ids=users_to_delete)

                    if iter_count % sleep_after == 0 and iter_count != 0:
                        # mesh is very inefficient with sql, give it time to catch up so we don't crash the system
                        logger.info(
                            f"Sleeping for 7 seconds after {iter_count} iterations."
                        )
                        sleep(7)

            # after all done, see if need to update display name
            ms2 = MeshSync(uri)
            unique_ids = ms2.get_unique_mesh_users(new_trmm_agents)
            for user in unique_ids:
                try:
                    mesh_realname = ms2.mesh_users[user]["realname"]
                except KeyError:
                    mesh_realname = ""
                trmm_realname = mesh_users_dict[user]["full_name"]
                if mesh_realname != trmm_realname:
                    logger.info(
                        f"Display names don't match. Updating {user} name from {mesh_realname} to {trmm_realname}"
                    )
                    ms2.update_mesh_displayname(user_info=mesh_users_dict[user])

        except Exception:
            logger.debug(traceback.format_exc())
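At its core the sync is set arithmetic on the two normalized lists: users present on the TRMM side but not in MeshCentral get created, the reverse set gets deleted, and the same comparison is then repeated per node. A toy run of that logic with made-up ids:

```python
# Global user reconciliation, as in sync_mesh_perms_task, on made-up ids.
source_users_global = {"user//janedoe___7", "user//bob___3"}   # derived from TRMM perms
target_users_global = {"user//bob___3", "user//stale___9"}     # currently in MeshCentral

new_users = source_users_global - target_users_global
users_to_delete_globally = target_users_global - source_users_global
print(new_users)                 # {'user//janedoe___7'}  -> ms.add_user_to_mesh(...)
print(users_to_delete_globally)  # {'user//stale___9'}    -> ms.delete_user_from_mesh(...)

# Per-node reconciliation for one node id:
source_users = {"user//janedoe___7", "user//bob___3"}
target_users = {"user//bob___3"}
print(source_users - target_users)  # {'user//janedoe___7'} -> add_users_to_node
print(target_users - source_users)  # set()                 -> nothing to remove
```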
@@ -1,3 +1,4 @@
import os
from unittest.mock import patch

import requests
@@ -11,7 +12,7 @@ from model_bakery import baker
from rest_framework.authtoken.models import Token

# from agents.models import Agent
from core.utils import get_core_settings, get_meshagent_url
from core.utils import get_core_settings, get_mesh_ws_url, get_meshagent_url

# from logs.models import PendingAction
from tacticalrmm.constants import (  # PAAction,; PAStatus,
@@ -109,18 +110,63 @@ class TestCoreTasks(TacticalTestCase):

    def test_edit_coresettings(self):
        url = "/core/settings/"

        # setup
        baker.make("automation.Policy", _quantity=2)
        # test normal request
        data = {
            "smtp_from_email": "newexample@example.com",
            "mesh_token": "New_Mesh_Token",
            "mesh_site": "https://mesh.example.com",
            "mesh_username": "bob",
            "sync_mesh_with_trmm": False,
        }
        r = self.client.put(url, data)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(get_core_settings().smtp_from_email, data["smtp_from_email"])
        self.assertEqual(get_core_settings().mesh_token, data["mesh_token"])
        core = get_core_settings()
        self.assertEqual(core.smtp_from_email, "newexample@example.com")
        self.assertEqual(core.mesh_token, "New_Mesh_Token")
        self.assertEqual(core.mesh_site, "https://mesh.example.com")
        self.assertEqual(core.mesh_username, "bob")
        self.assertFalse(core.sync_mesh_with_trmm)

        # test to_representation
        r = self.client.get(url)
        self.assertEqual(r.data["smtp_from_email"], "newexample@example.com")
        self.assertEqual(r.data["mesh_token"], "New_Mesh_Token")
        self.assertEqual(r.data["mesh_site"], "https://mesh.example.com")
        self.assertEqual(r.data["mesh_username"], "bob")
        self.assertFalse(r.data["sync_mesh_with_trmm"])

        self.check_not_authenticated("put", url)

    @override_settings(HOSTED=True)
    def test_hosted_edit_coresettings(self):
        url = "/core/settings/"
        baker.make("automation.Policy", _quantity=2)
        data = {
            "smtp_from_email": "newexample1@example.com",
            "mesh_token": "abc123",
            "mesh_site": "https://mesh15534.example.com",
            "mesh_username": "jane",
            "sync_mesh_with_trmm": False,
        }
        r = self.client.put(url, data)
        self.assertEqual(r.status_code, 200)
        core = get_core_settings()
        self.assertEqual(core.smtp_from_email, "newexample1@example.com")
        self.assertIn("41410834b8bb4481446027f8", core.mesh_token)  # type: ignore
        self.assertTrue(core.sync_mesh_with_trmm)
        if "GHACTIONS" in os.environ:
            self.assertEqual(core.mesh_site, "https://example.com")
            self.assertEqual(core.mesh_username, "pipeline")

        # test to_representation
        r = self.client.get(url)
        self.assertEqual(r.data["smtp_from_email"], "newexample1@example.com")
        self.assertEqual(r.data["mesh_token"], "n/a")
        self.assertEqual(r.data["mesh_site"], "n/a")
        self.assertEqual(r.data["mesh_username"], "n/a")
        self.assertTrue(r.data["sync_mesh_with_trmm"])

        self.check_not_authenticated("put", url)

@@ -476,6 +522,48 @@ class TestNatsUrls(TacticalTestCase):
        self.assertEqual(get_nats_hosts(), ("0.0.0.0", "0.0.0.0", "api.example.com"))


class TestMeshWSUrl(TacticalTestCase):
    def setUp(self):
        self.setup_coresettings()

    @patch("core.utils.get_auth_token")
    def test_standard_install(self, mock_token):
        mock_token.return_value = "abc123"
        self.assertEqual(
            get_mesh_ws_url(), "ws://127.0.0.1:4430/control.ashx?auth=abc123"
        )

    @patch("core.utils.get_auth_token")
    @override_settings(MESH_PORT=8876)
    def test_standard_install_custom_port(self, mock_token):
        mock_token.return_value = "abc123"
        self.assertEqual(
            get_mesh_ws_url(), "ws://127.0.0.1:8876/control.ashx?auth=abc123"
        )

    @patch("core.utils.get_auth_token")
    @override_settings(DOCKER_BUILD=True, MESH_WS_URL="ws://tactical-meshcentral:4443")
    def test_docker_install(self, mock_token):
        mock_token.return_value = "abc123"
        self.assertEqual(
            get_mesh_ws_url(), "ws://tactical-meshcentral:4443/control.ashx?auth=abc123"
        )

    @patch("core.utils.get_auth_token")
    @override_settings(USE_EXTERNAL_MESH=True)
    def test_external_mesh(self, mock_token):
        mock_token.return_value = "abc123"

        from core.models import CoreSettings

        core = CoreSettings.objects.first()
        core.mesh_site = "https://mesh.external.com"  # type: ignore
        core.save(update_fields=["mesh_site"])  # type: ignore
        self.assertEqual(
            get_mesh_ws_url(), "wss://mesh.external.com/control.ashx?auth=abc123"
        )


class TestCorePermissions(TacticalTestCase):
    def setUp(self):
        self.setup_client()
@@ -16,6 +16,7 @@ from tacticalrmm.constants import (
    AGENT_TBL_PEND_ACTION_CNT_CACHE_PREFIX,
    CORESETTINGS_CACHE_KEY,
    ROLE_CACHE_PREFIX,
    TRMM_WS_MAX_SIZE,
    AgentPlat,
    MeshAgentIdent,
)
@@ -83,23 +84,23 @@ def get_core_settings() -> "CoreSettings":

def get_mesh_ws_url() -> str:
    core = get_core_settings()
    token = get_auth_token(core.mesh_username, core.mesh_token)
    token = get_auth_token(core.mesh_api_superuser, core.mesh_token)

    if settings.DOCKER_BUILD:
        uri = f"{settings.MESH_WS_URL}/control.ashx?auth={token}"
    else:
        if getattr(settings, "TRMM_INSECURE", False):
            site = core.mesh_site.replace("https", "ws")
            uri = f"{site}:4430/control.ashx?auth={token}"
        else:
            if getattr(settings, "USE_EXTERNAL_MESH", False):
                site = core.mesh_site.replace("https", "wss")
                uri = f"{site}/control.ashx?auth={token}"
            else:
                mesh_port = getattr(settings, "MESH_PORT", 4430)
                uri = f"ws://127.0.0.1:{mesh_port}/control.ashx?auth={token}"

    return uri


async def get_mesh_device_id(uri: str, device_group: str) -> None:
    async with websockets.connect(uri) as ws:
    async with websockets.connect(uri, max_size=TRMM_WS_MAX_SIZE) as ws:
        payload = {"action": "meshes", "responseid": "meshctrl"}
        await ws.send(json.dumps(payload))

@@ -204,3 +205,7 @@ def get_meshagent_url(
    }

    return base + "/meshagents?" + urllib.parse.urlencode(params)


def make_alpha_numeric(s: str):
    return "".join(filter(str.isalnum, s))
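`get_mesh_ws_url()` above returns a `control.ashx` websocket URI whose scheme, host and port depend on the install type (Docker, `--insecure`, external mesh, or local). A hedged sketch of how such a URI is consumed, modelled on the `get_mesh_device_id` hunk; the 100 MiB cap mirrors the `TRMM_WS_MAX_SIZE` default added in constants.py, and the URI value in the comment is a placeholder:

```python
import asyncio
import json

import websockets  # the same third-party package used in the hunk above

MAX_WS_SIZE = 100 * 2**20  # 100 MiB, mirroring the TRMM_WS_MAX_SIZE default


async def list_meshes(uri: str) -> list:
    # uri would normally come from get_mesh_ws_url()
    async with websockets.connect(uri, max_size=MAX_WS_SIZE) as ws:
        await ws.send(json.dumps({"action": "meshes", "responseid": "meshctrl"}))
        reply = json.loads(await ws.recv())
        return reply.get("meshes", [])


# asyncio.run(list_meshes("ws://127.0.0.1:4430/control.ashx?auth=<token>"))
```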
@@ -15,12 +15,13 @@ from django.views.decorators.csrf import csrf_exempt
from redis import from_url
from rest_framework.decorators import api_view, permission_classes
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView

from core.decorators import monitoring_view
from core.tasks import sync_mesh_perms_task
from core.utils import get_core_settings, sysd_svc_is_running, token_is_valid
from logs.models import AuditLog
from tacticalrmm.constants import AuditActionType, PAStatus
@@ -56,14 +57,29 @@ class GetEditCoreSettings(APIView):
        return Response(CoreSettingsSerializer(settings).data)

    def put(self, request):
        data = request.data.copy()

        if getattr(settings, "HOSTED", False):
            data.pop("mesh_site")
            data.pop("mesh_token")
            data.pop("mesh_username")
            data["sync_mesh_with_trmm"] = True

        coresettings = CoreSettings.objects.first()
        serializer = CoreSettingsSerializer(instance=coresettings, data=request.data)
        serializer = CoreSettingsSerializer(instance=coresettings, data=data)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        sync_mesh_perms_task.delay()

        return Response("ok")


@api_view()
@permission_classes([AllowAny])
def home(request):
    return Response({"status": "ok"})


@api_view()
def version(request):
    return Response(settings.APP_VER)
@@ -315,7 +331,7 @@ class UpdateDeleteKeyStore(APIView):


class GetAddURLAction(APIView):
    permission_classes = [IsAuthenticated, CoreSettingsPerms]
    permission_classes = [IsAuthenticated, URLActionPerms]

    def get(self, request):
        actions = URLAction.objects.all()
@@ -0,0 +1,28 @@
# Generated by Django 4.2.10 on 2024-02-19 05:57

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("logs", "0024_remove_pendingaction_cancelable_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="auditlog",
            name="id",
            field=models.BigAutoField(primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name="debuglog",
            name="id",
            field=models.BigAutoField(primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name="pendingaction",
            name="id",
            field=models.BigAutoField(primary_key=True, serialize=False),
        ),
    ]
@@ -26,6 +26,7 @@ def get_debug_level() -> str:


class AuditLog(models.Model):
    id = models.BigAutoField(primary_key=True)
    username = models.CharField(max_length=255)
    agent = models.CharField(max_length=255, null=True, blank=True)
    agent_id = models.CharField(max_length=255, blank=True, null=True)
@@ -47,7 +48,7 @@ class AuditLog(models.Model):
                (self.message[:253] + "..") if len(self.message) > 255 else self.message
            )

        return super(AuditLog, self).save(*args, **kwargs)
        return super().save(*args, **kwargs)

    @staticmethod
    def audit_mesh_session(
@@ -258,6 +259,7 @@ class AuditLog(models.Model):
class DebugLog(models.Model):
    objects = PermissionQuerySet.as_manager()

    id = models.BigAutoField(primary_key=True)
    entry_time = models.DateTimeField(auto_now_add=True)
    agent = models.ForeignKey(
        "agents.Agent",
@@ -347,6 +349,7 @@ class DebugLog(models.Model):
class PendingAction(models.Model):
    objects = PermissionQuerySet.as_manager()

    id = models.BigAutoField(primary_key=True)
    agent = models.ForeignKey(
        "agents.Agent",
        related_name="pendingactions",
@@ -454,10 +457,10 @@ class BaseAuditModel(models.Model):
                debug_info=get_debug_info(),
            )

        super(BaseAuditModel, self).save(*args, **kwargs)
        super().save(*args, **kwargs)

    def delete(self, *args, **kwargs) -> Tuple[int, Dict[str, int]]:
        super(BaseAuditModel, self).delete(*args, **kwargs)
        super().delete(*args, **kwargs)

        username = get_username()
        if username:
@@ -1,5 +1,5 @@
black
daphne==4.0.0
daphne==4.1.0
Werkzeug
django-extensions
isort

@@ -7,4 +7,4 @@ pytest-xdist
pytest-cov
refurb
flake8
daphne==4.0.0
daphne==4.1.0
@@ -1,46 +1,46 @@
adrf==0.1.3
adrf==0.1.5
asgiref==3.7.2
celery==5.3.6
certifi==2024.2.2
cffi==1.16.0
channels==4.0.0
channels_redis==4.2.0
cryptography==42.0.2
Django==4.2.9
cryptography==42.0.5
Django==4.2.11
django-cors-headers==4.3.1
django-filter==23.5
django-filter==24.2
django-rest-knox==4.2.0
djangorestframework==3.14.0
drf-spectacular==0.27.1
hiredis==2.3.2
meshctrl==0.1.15
msgpack==1.0.7
nats-py==2.6.0
packaging==23.2
msgpack==1.0.8
nats-py==2.7.2
packaging==24.0
psutil==5.9.8
psycopg[binary]==3.1.17
psycopg[binary]==3.1.18
pycparser==2.21
pycryptodome==3.20.0
pyotp==2.9.0
pyparsing==3.1.1
python-ipware==2.0.1
pyparsing==3.1.2
python-ipware==2.0.2
qrcode==7.4.2
redis==5.0.1
redis==5.0.3
requests==2.31.0
six==1.16.0
sqlparse==0.4.4
twilio==8.12.0
urllib3==2.2.0
uvicorn[standard]==0.27.0
uWSGI==2.0.23
validators==0.22.0
twilio==8.13.0
urllib3==2.2.1
uvicorn[standard]==0.29.0
uWSGI==2.0.24
validators==0.24.0
vine==5.1.0
websockets==12.0
zipp==3.17.0
pandas==2.2.0
zipp==3.18.1
pandas==2.2.1
kaleido==0.2.1
jinja2==3.1.3
markdown==3.5.2
plotly==5.18.0
weasyprint==60.2
markdown==3.6
plotly==5.20.0
weasyprint==61.2
ocxsect==0.1.5
@@ -0,0 +1,45 @@
# Generated by Django 4.2.10 on 2024-02-22 04:51

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("scripts", "0019_script_env_vars"),
    ]

    operations = [
        migrations.AlterField(
            model_name="script",
            name="shell",
            field=models.CharField(
                choices=[
                    ("powershell", "Powershell"),
                    ("cmd", "Batch (CMD)"),
                    ("python", "Python"),
                    ("shell", "Shell"),
                    ("nushell", "Nushell"),
                    ("deno", "Deno"),
                ],
                default="powershell",
                max_length=100,
            ),
        ),
        migrations.AlterField(
            model_name="scriptsnippet",
            name="shell",
            field=models.CharField(
                choices=[
                    ("powershell", "Powershell"),
                    ("cmd", "Batch (CMD)"),
                    ("python", "Python"),
                    ("shell", "Shell"),
                    ("nushell", "Nushell"),
                    ("deno", "Deno"),
                ],
                default="powershell",
                max_length=15,
            ),
        ),
    ]
@@ -1,5 +1,7 @@
import asyncio

from django.conf import settings

from agents.models import Agent, AgentHistory
from scripts.models import Script
from tacticalrmm.celery import app
@@ -78,6 +80,8 @@ def bulk_script_task(
            },
            "run_as_user": run_as_user,
            "env_vars": script.parse_script_env_vars(agent, script.shell, env_vars),
            "nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
            "deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
        }
        tup = (agent.agent_id, data)
        items.append(tup)
@@ -5,6 +5,7 @@ from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from django.conf import settings

from agents.permissions import RunScriptPerms
from tacticalrmm.constants import ScriptShell, ScriptType
@@ -162,6 +163,8 @@ class TestScript(APIView):
            },
            "run_as_user": request.data["run_as_user"],
            "env_vars": parsed_env_vars,
            "nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
            "deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
        }

        r = asyncio.run(
@@ -190,6 +193,10 @@ def download(request, pk):
            ext = ".py"
        case ScriptShell.SHELL:
            ext = ".sh"
        case ScriptShell.NUSHELL:
            ext = ".nu"
        case ScriptShell.DENO:
            ext = ".ts"
        case _:
            ext = ""
@@ -0,0 +1,18 @@
# Generated by Django 4.2.10 on 2024-02-19 05:57

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("software", "0003_delete_chocolog"),
    ]

    operations = [
        migrations.AlterField(
            model_name="installedsoftware",
            name="id",
            field=models.BigAutoField(primary_key=True, serialize=False),
        ),
    ]
@@ -15,6 +15,7 @@ class ChocoSoftware(models.Model):
class InstalledSoftware(models.Model):
    objects = PermissionQuerySet.as_manager()

    id = models.BigAutoField(primary_key=True)
    agent = models.ForeignKey(Agent, on_delete=models.CASCADE)
    software = models.JSONField()
@@ -9,7 +9,8 @@ from django.conf import settings

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tacticalrmm.settings")

app = Celery("tacticalrmm", backend="redis://" + settings.REDIS_HOST, broker="redis://" + settings.REDIS_HOST)  # type: ignore
redis_host = f"redis://{settings.REDIS_HOST}"
app = Celery("tacticalrmm", backend=redis_host, broker=redis_host)
app.accept_content = ["application/json"]
app.result_serializer = "json"
app.task_serializer = "json"
@@ -34,7 +35,7 @@ app.conf.beat_schedule = {
    },
    "remove-orphaned-tasks": {
        "task": "autotasks.tasks.remove_orphaned_win_tasks",
        "schedule": crontab(minute=50, hour="12"),
        "schedule": crontab(minute=50, hour="*/2"),
    },
    "agent-outages-task": {
        "task": "agents.tasks.agent_outages_task",
@@ -56,6 +57,10 @@ app.conf.beat_schedule = {
        "task": "core.tasks.sync_scheduled_tasks",
        "schedule": crontab(minute="*/2", hour="*"),
    },
    "sync-mesh-perms-task": {
        "task": "core.tasks.sync_mesh_perms_task",
        "schedule": crontab(minute="*/4", hour="*"),
    },
    "resolve-pending-actions": {
        "task": "core.tasks.resolve_pending_actions",
        "schedule": timedelta(seconds=100.0),
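The beat-schedule hunk above moves orphaned-task cleanup from once a day (12:50) to every two hours at :50, and registers the new mesh permission sync every four minutes. A condensed sketch of those schedule shapes as a standalone app (the Redis URL is a placeholder, mirroring the f-string refactor above):

```python
from celery import Celery
from celery.schedules import crontab

redis_host = "redis://127.0.0.1"  # placeholder broker/backend
app = Celery("tacticalrmm", backend=redis_host, broker=redis_host)

app.conf.beat_schedule = {
    "remove-orphaned-tasks": {
        "task": "autotasks.tasks.remove_orphaned_win_tasks",
        "schedule": crontab(minute=50, hour="*/2"),  # was minute=50, hour="12"
    },
    "sync-mesh-perms-task": {
        "task": "core.tasks.sync_mesh_perms_task",
        "schedule": crontab(minute="*/4", hour="*"),  # every 4 minutes
    },
}
```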
@@ -1,6 +1,7 @@
import zoneinfo
from enum import Enum

from django.conf import settings
from django.db import models


@@ -30,6 +31,10 @@ RESOLVE_ALERTS_LOCK = "resolve-alerts-lock-key"
SYNC_SCHED_TASK_LOCK = "sync-sched-tasks-lock-key"
AGENT_OUTAGES_LOCK = "agent-outages-task-lock-key"
ORPHANED_WIN_TASK_LOCK = "orphaned-win-task-lock-key"
SYNC_MESH_PERMS_TASK_LOCK = "sync-mesh-perms-lock-key"

TRMM_WS_MAX_SIZE = getattr(settings, "TRMM_WS_MAX_SIZE", 100 * 2**20)
TRMM_MAX_REQUEST_SIZE = getattr(settings, "TRMM_MAX_REQUEST_SIZE", 10 * 2**20)


class GoArch(models.TextChoices):
@@ -132,6 +137,8 @@ class ScriptShell(models.TextChoices):
    CMD = "cmd", "Batch (CMD)"
    PYTHON = "python", "Python"
    SHELL = "shell", "Shell"
    NUSHELL = "nushell", "Nushell"
    DENO = "deno", "Deno"


class ScriptType(models.TextChoices):
api/tacticalrmm/tacticalrmm/logger.py (new file)
@@ -0,0 +1,3 @@
import logging

logger = logging.getLogger("trmm")
@@ -4,6 +4,7 @@ from django.db.models import Q
from django.shortcuts import get_object_or_404

from agents.models import Agent
from tacticalrmm.constants import AGENT_DEFER

if TYPE_CHECKING:
    from accounts.models import User
@@ -33,7 +34,10 @@ def _has_perm_on_agent(user: "User", agent_id: str) -> bool:
    elif not role:
        return False

    agent = get_object_or_404(Agent, agent_id=agent_id)
    agent = get_object_or_404(
        Agent.objects.defer(*AGENT_DEFER).select_related("site__client"),
        agent_id=agent_id,
    )
    can_view_clients = role.can_view_clients.all() if role else None
    can_view_sites = role.can_view_sites.all() if role else None
@@ -1,4 +1,5 @@
import os
import sys
from contextlib import suppress
from datetime import timedelta
from pathlib import Path
@@ -20,27 +21,70 @@ MAC_UNINSTALL = BASE_DIR / "core" / "mac_uninstall.sh"
AUTH_USER_MODEL = "accounts.User"

# latest release
TRMM_VERSION = "0.17.5"
TRMM_VERSION = "0.18.2"

# https://github.com/amidaware/tacticalrmm-web
WEB_VERSION = "0.101.40"
WEB_VERSION = "0.101.44"

# bump this version everytime vue code is changed
# to alert user they need to manually refresh their browser
APP_VER = "0.0.190"
APP_VER = "0.0.192"

# https://github.com/amidaware/rmmagent
LATEST_AGENT_VER = "2.6.2"
LATEST_AGENT_VER = "2.7.0"

MESH_VER = "1.1.20"
MESH_VER = "1.1.21"

NATS_SERVER_VER = "2.10.10"
NATS_SERVER_VER = "2.10.12"

# Install Nushell on the agent
# https://github.com/nushell/nushell
INSTALL_NUSHELL = True
# GitHub version to download. The file will be downloaded from GitHub, extracted and installed.
# Version to download. If INSTALL_NUSHELL_URL is not provided, the file will be downloaded from GitHub,
# extracted and installed.
INSTALL_NUSHELL_VERSION = "0.92.1"
# URL to download directly. This is expected to be the direct URL, unauthenticated, uncompressed, ready to be installed.
# Use {OS}, {ARCH} and {VERSION} to specify the GOOS, GOARCH and INSTALL_NUSHELL_VERSION respectively.
# Windows: The ".exe" extension will be added automatically.
# Examples:
# https://examplle.com/download/nushell/{OS}/{ARCH}/{VERSION}/nu
# https://examplle.com/download/nushell/nu-{VERSION}-{OS}-{ARCH}
INSTALL_NUSHELL_URL = ""
# Enable Nushell config on the agent
# The default is to not enable the config because it could change how scripts run.
# However, disabling the config prevents plugins from being registered.
# https://github.com/nushell/nushell/issues/10754
# False: --no-config-file option is added to the command line.
# True: --config and --env-config options are added to the command line and point to the Agent's directory.
NUSHELL_ENABLE_CONFIG = False

# Install Deno on the agent
# https://github.com/denoland/deno
INSTALL_DENO = True
# Version to download. If INSTALL_DENO_URL is not provided, the file will be downloaded from GitHub,
# extracted and installed.
INSTALL_DENO_VERSION = "v1.42.1"
# URL to download directly. This is expected to be the direct URL, unauthenticated, uncompressed, ready to be installed.
# Use {OS}, {ARCH} and {VERSION} to specify the GOOS, GOARCH and INSTALL_DENO_VERSION respectively.
# Windows: The ".exe" extension will be added automatically.
# Examples:
# https://examplle.com/download/deno/{OS}/{ARCH}/{VERSION}/deno
# https://examplle.com/download/deno/deno-{VERSION}-{OS}-{ARCH}
INSTALL_DENO_URL = ""
# Default permissions for Deno
# Space separated list of permissions as listed in the documentation.
# https://docs.deno.com/runtime/manual/basics/permissions#permissions
# Examples:
# DENO_DEFAULT_PERMISSIONS = "--allow-sys --allow-net --allow-env"
# DENO_DEFAULT_PERMISSIONS = "--allow-all"
DENO_DEFAULT_PERMISSIONS = "--allow-all"

# for the update script, bump when need to recreate venv
PIP_VER = "42"
PIP_VER = "43"

SETUPTOOLS_VER = "69.0.3"
WHEEL_VER = "0.42.0"
SETUPTOOLS_VER = "69.2.0"
WHEEL_VER = "0.43.0"

AGENT_BASE_URL = "https://agents.tacticalrmm.com"
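The `INSTALL_NUSHELL_URL` / `INSTALL_DENO_URL` settings above accept a template containing `{OS}`, `{ARCH}` and `{VERSION}` placeholders. The substitution itself happens on the agent, so this is only an illustrative sketch of what that expansion amounts to; the helper name is hypothetical and the ".exe" handling on Windows is left out:

```python
def render_download_url(template: str, goos: str, goarch: str, version: str) -> str:
    # Hypothetical helper: expands the {OS}/{ARCH}/{VERSION} placeholders described
    # in the settings comments above. Per those comments, the agent also appends
    # ".exe" automatically on Windows; that step is omitted here.
    return (
        template.replace("{OS}", goos)
        .replace("{ARCH}", goarch)
        .replace("{VERSION}", version)
    )


print(
    render_download_url(
        "https://examplle.com/download/nushell/nu-{VERSION}-{OS}-{ARCH}",
        "linux",
        "amd64",
        "0.92.1",
    )
)  # https://examplle.com/download/nushell/nu-0.92.1-linux-amd64
```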
@@ -71,6 +115,7 @@ HOSTED = False
SWAGGER_ENABLED = False
REDIS_HOST = "127.0.0.1"
TRMM_LOG_LEVEL = "ERROR"
TRMM_LOG_TO = "file"

with suppress(ImportError):
    from .local_settings import *  # noqa
@@ -240,6 +285,24 @@ def get_log_level() -> str:
    return TRMM_LOG_LEVEL


def configure_logging_handler():
    cfg = {
        "level": get_log_level(),
        "formatter": "verbose",
    }

    log_to = os.getenv("TRMM_LOG_TO", TRMM_LOG_TO)

    if log_to == "stdout":
        cfg["class"] = "logging.StreamHandler"
        cfg["stream"] = sys.stdout
    else:
        cfg["class"] = "logging.FileHandler"
        cfg["filename"] = os.path.join(LOG_DIR, "trmm_debug.log")

    return cfg


LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
@@ -256,12 +319,7 @@ LOGGING = {
            "filename": os.path.join(LOG_DIR, "django_debug.log"),
            "formatter": "verbose",
        },
        "trmm": {
            "level": get_log_level(),
            "class": "logging.FileHandler",
            "filename": os.path.join(LOG_DIR, "trmm_debug.log"),
            "formatter": "verbose",
        },
        "trmm": configure_logging_handler(),
    },
    "loggers": {
        "django.request": {"handlers": ["file"], "level": "ERROR", "propagate": True},
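`configure_logging_handler()` above lets the `trmm` logger write to stdout instead of `trmm_debug.log` when `TRMM_LOG_TO=stdout` is set in the environment (or via `local_settings.py`), which suits container and journal-based deployments. A minimal standard-library sketch of the same env-driven handler selection, outside Django:

```python
import logging
import os
import sys


def build_handler(log_to: str) -> logging.Handler:
    # Same decision as configure_logging_handler(): stdout stream vs. a log file.
    if log_to == "stdout":
        return logging.StreamHandler(sys.stdout)
    return logging.FileHandler("trmm_debug.log")


logger = logging.getLogger("trmm")
logger.setLevel(logging.INFO)
logger.addHandler(build_handler(os.getenv("TRMM_LOG_TO", "file")))
logger.info("logging configured")
```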
@@ -18,3 +18,11 @@ class AgentCheckInConfig(TRMMStruct):
    checkin_wmi: int
    checkin_syncmesh: int
    limit_data: bool
    install_nushell: bool
    install_nushell_version: str
    install_nushell_url: str
    nushell_enable_config: bool
    install_deno: bool
    install_deno_version: str
    install_deno_url: str
    deno_default_permissions: str
@@ -5,6 +5,7 @@ from knox import views as knox_views
from accounts.views import CheckCreds, LoginView
from agents.consumers import SendCMD
from core.consumers import DashInfo
from core.views import home


class AgentIDConverter:
@@ -20,6 +21,7 @@ class AgentIDConverter:
register_converter(AgentIDConverter, "agent")

urlpatterns = [
    path("", home),
    path("checkcreds/", CheckCreds.as_view()),
    path("login/", LoginView.as_view()),
    path("logout/", knox_views.LogoutView.as_view()),
@@ -403,6 +403,17 @@ def replace_arg_db_values(
    elif value is True or value is False:
        return format_shell_bool(value, shell)

    elif isinstance(value, dict):
        return json.dumps(value)

    # return str for everything else
    try:
        ret = str(value)
    except Exception:
        ret = ""

    return ret


def format_shell_array(value: list[str]) -> str:
    temp_string = ""
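The new `dict` branch in `replace_arg_db_values` above means dictionary-valued custom fields are injected into script arguments as a JSON string rather than a Python repr. A quick illustration of the difference (the field value is made up):

```python
import json

value = {"threshold": 90, "notify": ["admin@example.com"]}

# Python repr uses single quotes, which is awkward for most shells and parsers...
print(str(value))         # {'threshold': 90, 'notify': ['admin@example.com']}
# ...whereas the JSON form round-trips cleanly inside the target script.
print(json.dumps(value))  # {"threshold": 90, "notify": ["admin@example.com"]}
```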
@@ -0,0 +1,18 @@
# Generated by Django 4.2.10 on 2024-02-19 05:57

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("winupdate", "0012_auto_20220227_0554"),
    ]

    operations = [
        migrations.AlterField(
            model_name="winupdate",
            name="id",
            field=models.BigAutoField(primary_key=True, serialize=False),
        ),
    ]
@@ -39,6 +39,7 @@ SCHEDULE_FREQUENCY_CHOICES = [


class WinUpdate(models.Model):
    id = models.BigAutoField(primary_key=True)
    agent = models.ForeignKey(
        Agent, related_name="winupdates", on_delete=models.CASCADE
    )
@@ -1,4 +1,4 @@
FROM nats:2.10.9-alpine
FROM nats:2.10.12-alpine

ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
@@ -1,5 +1,5 @@
# creates python virtual env
FROM python:3.11.6-slim AS CREATE_VENV_STAGE
FROM python:3.11.8-slim AS CREATE_VENV_STAGE

ARG DEBIAN_FRONTEND=noninteractive

@@ -21,14 +21,14 @@ RUN apt-get update && \
    pip install --no-cache-dir -r ${TACTICAL_TMP_DIR}/api/requirements.txt

# pulls community scripts from git repo
FROM python:3.11.6-slim AS GET_SCRIPTS_STAGE
FROM python:3.11.8-slim AS GET_SCRIPTS_STAGE

RUN apt-get update && \
    apt-get install -y --no-install-recommends git && \
    git clone https://github.com/amidaware/community-scripts.git /community-scripts

# runtime image
FROM python:3.11.6-slim
FROM python:3.11.8-slim

# set env variables
ENV VIRTUAL_ENV /opt/venv
go.mod
@@ -1,11 +1,11 @@
module github.com/amidaware/tacticalrmm

go 1.21.6
go 1.21.8

require (
    github.com/jmoiron/sqlx v1.3.5
    github.com/lib/pq v1.10.9
    github.com/nats-io/nats.go v1.32.0
    github.com/nats-io/nats.go v1.34.0
    github.com/ugorji/go/codec v1.2.12
    github.com/wh1te909/trmm-shared v0.0.0-20220227075846-f9f757361139
)
go.sum
@@ -12,8 +12,8 @@ github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/nats-io/nats.go v1.32.0 h1:Bx9BZS+aXYlxW08k8Gd3yR2s73pV5XSoAQUyp1Kwvp0=
github.com/nats-io/nats.go v1.32.0/go.mod h1:Ubdu4Nh9exXdSz0RVWRFBbRfrbSxOYd26oF0wkWclB8=
github.com/nats-io/nats.go v1.34.0 h1:fnxnPCNiwIG5w08rlMcEKTUw4AV/nKyGCOJE8TdhSPk=
github.com/nats-io/nats.go v1.34.0/go.mod h1:Ubdu4Nh9exXdSz0RVWRFBbRfrbSxOYd26oF0wkWclB8=
github.com/nats-io/nkeys v0.4.7 h1:RwNJbbIdYCoClSDNY7QVKZlyb/wfT6ugvFCiKy6vDvI=
github.com/nats-io/nkeys v0.4.7/go.mod h1:kqXRgRDPlGy7nGaEDMuYzmiJCIAAWDK0IMBtDmGD0nc=
github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=
install.sh
@@ -1,9 +1,12 @@
#!/usr/bin/env bash

SCRIPT_VERSION="82"
SCRIPT_VERSION="83"
SCRIPT_URL="https://raw.githubusercontent.com/amidaware/tacticalrmm/master/install.sh"

sudo apt install -y curl wget dirmngr gnupg lsb-release ca-certificates
sudo apt install -y software-properties-common
sudo apt update
sudo apt install -y openssl

GREEN='\033[0;32m'
YELLOW='\033[1;33m'
@@ -12,7 +15,7 @@ RED='\033[0;31m'
NC='\033[0m'

SCRIPTS_DIR='/opt/trmm-community-scripts'
PYTHON_VER='3.11.6'
PYTHON_VER='3.11.8'
SETTINGS_FILE='/rmm/api/tacticalrmm/tacticalrmm/settings.py'
local_settings='/rmm/api/tacticalrmm/tacticalrmm/local_settings.py'

@@ -30,6 +33,11 @@ fi

rm -f $TMP_FILE

if [ -d /rmm/api/tacticalrmm ]; then
  echo -ne "${RED}ERROR: Existing trmm installation found. The install script must be run on a clean server.${NC}\n"
  exit 1
fi

arch=$(uname -m)
if [[ "$arch" != "x86_64" ]] && [[ "$arch" != "aarch64" ]]; then
  echo -ne "${RED}ERROR: Only x86_64 and aarch64 is supported, not ${arch}${NC}\n"
@@ -113,6 +121,14 @@ print_green() {
  printf >&2 "\n"
}

print_error() {
  printf >&2 "${RED}${1}${NC}\n"
}

print_yellow() {
  printf >&2 "${YELLOW}${1}${NC}\n"
}

cls

while [[ $rmmdomain != *[.]*[.]* ]]; do
@@ -138,6 +154,34 @@ while [[ $letsemail != *[@]*[.]* ]]; do
  read letsemail
done

byocert=false
if [[ $* == *--use-own-cert* ]]; then
  byocert=true
fi

if [[ "$byocert" = true ]]; then
  while true; do

    print_yellow "Please enter the full path to your fullchain.pem file:"
    read -r fullchain_path
    print_yellow "Please enter the full path to your privkey.pem file:"
    read -r privkey_path

    if [[ ! -f "$fullchain_path" || ! -f "$privkey_path" ]]; then
      print_error "One or both files do not exist. Please try again."
      continue
    fi

    openssl x509 -in "$fullchain_path" -noout >/dev/null
    if [[ $? -ne 0 ]]; then
      print_error "ERROR: The provided file is not a valid certificate."
      exit 1
    fi

    break
  done
fi

if grep -q manage_etc_hosts /etc/hosts; then
  sudo sed -i '/manage_etc_hosts: true/d' /etc/cloud/cloud.cfg >/dev/null
  echo -e "\nmanage_etc_hosts: false" | sudo tee --append /etc/cloud/cloud.cfg >/dev/null
@@ -167,10 +211,6 @@ if [[ $* == *--insecure* ]]; then
  insecure=true
fi

sudo apt install -y software-properties-common
sudo apt update
sudo apt install -y openssl

if [[ "$insecure" = true ]]; then
  print_green 'Generating self-signed cert'
  certdir='/etc/ssl/tactical'
@@ -183,6 +223,10 @@ if [[ "$insecure" = true ]]; then
    -nodes -keyout ${CERT_PRIV_KEY} -out ${CERT_PUB_KEY} -subj "/CN=${rootdomain}" \
    -addext "subjectAltName=DNS:${rootdomain},DNS:*.${rootdomain}"

elif [[ "$byocert" = true ]]; then
  CERT_PRIV_KEY=$privkey_path
  CERT_PUB_KEY=$fullchain_path
  sudo chown ${USER}:${USER} $CERT_PRIV_KEY $CERT_PUB_KEY
else
  sudo apt install -y certbot
  print_green 'Getting wildcard cert'
@@ -447,7 +491,6 @@ DATABASES = {

MESH_USERNAME = "${meshusername}"
MESH_SITE = "https://${meshdomain}"
REDIS_HOST = "localhost"
ADMIN_ENABLED = True
EOF
)"
@@ -457,6 +500,16 @@ if [[ "$insecure" = true ]]; then
  echo "TRMM_INSECURE = True" | tee --append $local_settings >/dev/null
fi

if [[ "$byocert" = true ]]; then
  owncerts="$(
    cat <<EOF
CERT_FILE = "${CERT_PUB_KEY}"
KEY_FILE = "${CERT_PRIV_KEY}"
EOF
  )"
  echo "${owncerts}" | tee --append $local_settings >/dev/null
fi

if [ "$arch" = "x86_64" ]; then
  natsapi='nats-api'
else
@@ -970,6 +1023,7 @@ cd /rmm/api/tacticalrmm
source /rmm/api/env/bin/activate
python manage.py initial_db_setup
python manage.py reload_nats
python manage.py sync_mesh_with_trmm
deactivate
sudo systemctl start nats.service
main.go
@@ -12,7 +12,7 @@ import (
)

var (
    version = "3.5.2"
    version = "3.5.4"
    log = logrus.New()
)
Binary file not shown.
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

SCRIPT_VERSION="56"
SCRIPT_VERSION="57"
SCRIPT_URL='https://raw.githubusercontent.com/amidaware/tacticalrmm/master/restore.sh'

sudo apt update
@@ -13,7 +13,7 @@ RED='\033[0;31m'
NC='\033[0m'

SCRIPTS_DIR='/opt/trmm-community-scripts'
PYTHON_VER='3.11.6'
PYTHON_VER='3.11.8'
SETTINGS_FILE='/rmm/api/tacticalrmm/tacticalrmm/settings.py'

TMP_FILE=$(mktemp -p "" "rmmrestore_XXXXXXXXXX")
@@ -29,6 +29,11 @@ fi

rm -f $TMP_FILE

if [ -d /rmm/api/tacticalrmm ]; then
  echo -ne "${RED}ERROR: Existing trmm installation found. The restore script must be run on a clean server, please re-read the docs.${NC}\n"
  exit 1
fi

arch=$(uname -m)
if [[ "$arch" != "x86_64" ]] && [[ "$arch" != "aarch64" ]]; then
  echo -ne "${RED}ERROR: Only x86_64 and aarch64 is supported, not ${arch}${NC}\n"
update.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

SCRIPT_VERSION="151"
SCRIPT_VERSION="152"
SCRIPT_URL='https://raw.githubusercontent.com/amidaware/tacticalrmm/master/update.sh'
LATEST_SETTINGS_URL='https://raw.githubusercontent.com/amidaware/tacticalrmm/master/api/tacticalrmm/tacticalrmm/settings.py'
YELLOW='\033[1;33m'
@@ -10,8 +10,9 @@ NC='\033[0m'
THIS_SCRIPT=$(readlink -f "$0")

SCRIPTS_DIR='/opt/trmm-community-scripts'
PYTHON_VER='3.11.6'
PYTHON_VER='3.11.8'
SETTINGS_FILE='/rmm/api/tacticalrmm/tacticalrmm/settings.py'
local_settings='/rmm/api/tacticalrmm/tacticalrmm/local_settings.py'

TMP_FILE=$(mktemp -p "" "rmmupdate_XXXXXXXXXX")
curl -s -L "${SCRIPT_URL}" >${TMP_FILE}
@@ -249,8 +250,50 @@ if ! which npm >/dev/null; then
  sudo apt install -y npm
fi

# older distros still might not have npm after above command, due to recent changes to node apt packages which replaces nodesource with official node
# if we still don't have npm, force a switch to nodesource
if ! which npm >/dev/null; then
  sudo systemctl stop meshcentral
  sudo chown ${USER}:${USER} -R /meshcentral
  sudo apt remove -y nodejs
  sudo rm -rf /usr/lib/node_modules

  curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash - && sudo apt-get install -y nodejs
  sudo npm install -g npm

  cd /meshcentral
  rm -rf node_modules/ package-lock.json
  npm install
  sudo systemctl start meshcentral
fi

sudo npm install -g npm

CURRENT_MESH_VER=$(cd /meshcentral/node_modules/meshcentral && node -p -e "require('./package.json').version")
if [[ "${CURRENT_MESH_VER}" != "${LATEST_MESH_VER}" ]] || [[ "$force" = true ]]; then
  printf >&2 "${GREEN}Updating meshcentral from ${CURRENT_MESH_VER} to ${LATEST_MESH_VER}${NC}\n"
  sudo systemctl stop meshcentral
  sudo chown ${USER}:${USER} -R /meshcentral
  cd /meshcentral
  rm -rf node_modules/ package.json package-lock.json
  mesh_pkg="$(
    cat <<EOF
{
  "dependencies": {
    "archiver": "5.3.1",
    "meshcentral": "${LATEST_MESH_VER}",
    "otplib": "10.2.3",
    "pg": "8.7.1",
    "pgtools": "0.3.2"
  }
}
EOF
  )"
  echo "${mesh_pkg}" >/meshcentral/package.json
  npm install
  sudo systemctl start meshcentral
fi

# update from main repo
cd /rmm
git config user.email "admin@example.com"
@@ -298,14 +341,14 @@ if ! [[ $CHECK_CELERY_CONFIG ]]; then
  sed -i 's/CELERYD_OPTS=.*/CELERYD_OPTS="--time-limit=86400 --autoscale=20,2"/g' /etc/conf.d/celery.conf
fi

CHECK_ADMIN_ENABLED=$(grep ADMIN_ENABLED /rmm/api/tacticalrmm/tacticalrmm/local_settings.py)
CHECK_ADMIN_ENABLED=$(grep ADMIN_ENABLED $local_settings)
if ! [[ $CHECK_ADMIN_ENABLED ]]; then
  adminenabled="$(
    cat <<EOF
ADMIN_ENABLED = False
EOF
  )"
  echo "${adminenabled}" | tee --append /rmm/api/tacticalrmm/tacticalrmm/local_settings.py >/dev/null
  echo "${adminenabled}" | tee --append $local_settings >/dev/null
fi

if [ "$arch" = "x86_64" ]; then
@@ -342,10 +385,13 @@ if [ ! -d /opt/tactical/reporting/schemas ]; then
  sudo mkdir /opt/tactical/reporting/schemas
fi

sed -i '/^REDIS_HOST/d' $local_settings

sudo chown -R ${USER}:${USER} /opt/tactical

python manage.py pre_update_tasks
celery -A tacticalrmm purge -f
printf >&2 "${GREEN}Running database migrations (this might take a long time)...${NC}\n"
python manage.py migrate
python manage.py generate_json_schemas
python manage.py delete_tokens
@@ -510,30 +556,5 @@ for i in nats nats-api rmm daphne celery celerybeat nginx; do
  sudo systemctl start ${i}
done

CURRENT_MESH_VER=$(cd /meshcentral/node_modules/meshcentral && node -p -e "require('./package.json').version")
if [[ "${CURRENT_MESH_VER}" != "${LATEST_MESH_VER}" ]] || [[ "$force" = true ]]; then
  printf >&2 "${GREEN}Updating meshcentral from ${CURRENT_MESH_VER} to ${LATEST_MESH_VER}${NC}\n"
  sudo systemctl stop meshcentral
  sudo chown ${USER}:${USER} -R /meshcentral
  cd /meshcentral
  rm -rf node_modules/ package.json package-lock.json
  mesh_pkg="$(
    cat <<EOF
{
  "dependencies": {
    "archiver": "5.3.1",
    "meshcentral": "${LATEST_MESH_VER}",
    "otplib": "10.2.3",
    "pg": "8.7.1",
    "pgtools": "0.3.2"
  }
}
EOF
  )"
  echo "${mesh_pkg}" >/meshcentral/package.json
  npm install
  sudo systemctl start meshcentral
fi

rm -f $TMP_SETTINGS
printf >&2 "${GREEN}Update finished!${NC}\n"